summaryrefslogtreecommitdiff
path: root/src/leap/soledad/u1db/tests
diff options
context:
space:
mode:
authordrebs <drebs@leap.se>2012-12-06 11:07:53 -0200
committerdrebs <drebs@leap.se>2012-12-06 11:07:53 -0200
commit584696e4dbfc13b793208dc4c5c6cdc224db5a12 (patch)
treea52830511908c8a135acc16095d61acba177873b /src/leap/soledad/u1db/tests
parent2cf00360bce0193d8fa73194a148c28426172043 (diff)
Remove u1db and swiftclient dirs and refactor.
Diffstat (limited to 'src/leap/soledad/u1db/tests')
-rw-r--r--src/leap/soledad/u1db/tests/__init__.py463
-rw-r--r--src/leap/soledad/u1db/tests/c_backend_wrapper.pyx1541
-rw-r--r--src/leap/soledad/u1db/tests/commandline/__init__.py47
-rw-r--r--src/leap/soledad/u1db/tests/commandline/test_client.py916
-rw-r--r--src/leap/soledad/u1db/tests/commandline/test_command.py105
-rw-r--r--src/leap/soledad/u1db/tests/commandline/test_serve.py101
-rw-r--r--src/leap/soledad/u1db/tests/test_auth_middleware.py309
-rw-r--r--src/leap/soledad/u1db/tests/test_backends.py1895
-rw-r--r--src/leap/soledad/u1db/tests/test_c_backend.py634
-rw-r--r--src/leap/soledad/u1db/tests/test_common_backend.py33
-rw-r--r--src/leap/soledad/u1db/tests/test_document.py148
-rw-r--r--src/leap/soledad/u1db/tests/test_errors.py61
-rw-r--r--src/leap/soledad/u1db/tests/test_http_app.py1133
-rw-r--r--src/leap/soledad/u1db/tests/test_http_client.py361
-rw-r--r--src/leap/soledad/u1db/tests/test_http_database.py256
-rw-r--r--src/leap/soledad/u1db/tests/test_https.py117
-rw-r--r--src/leap/soledad/u1db/tests/test_inmemory.py128
-rw-r--r--src/leap/soledad/u1db/tests/test_open.py69
-rw-r--r--src/leap/soledad/u1db/tests/test_query_parser.py443
-rw-r--r--src/leap/soledad/u1db/tests/test_remote_sync_target.py314
-rw-r--r--src/leap/soledad/u1db/tests/test_remote_utils.py36
-rw-r--r--src/leap/soledad/u1db/tests/test_server_state.py93
-rw-r--r--src/leap/soledad/u1db/tests/test_sqlite_backend.py493
-rw-r--r--src/leap/soledad/u1db/tests/test_sync.py1285
-rw-r--r--src/leap/soledad/u1db/tests/test_test_infrastructure.py41
-rw-r--r--src/leap/soledad/u1db/tests/test_vectorclock.py121
-rw-r--r--src/leap/soledad/u1db/tests/testing-certs/Makefile35
-rw-r--r--src/leap/soledad/u1db/tests/testing-certs/cacert.pem58
-rw-r--r--src/leap/soledad/u1db/tests/testing-certs/testing.cert61
-rw-r--r--src/leap/soledad/u1db/tests/testing-certs/testing.key16
30 files changed, 0 insertions, 11313 deletions
diff --git a/src/leap/soledad/u1db/tests/__init__.py b/src/leap/soledad/u1db/tests/__init__.py
deleted file mode 100644
index b8e16b15..00000000
--- a/src/leap/soledad/u1db/tests/__init__.py
+++ /dev/null
@@ -1,463 +0,0 @@
-# Copyright 2011-2012 Canonical Ltd.
-#
-# This file is part of u1db.
-#
-# u1db is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License version 3
-# as published by the Free Software Foundation.
-#
-# u1db is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with u1db. If not, see <http://www.gnu.org/licenses/>.
-
-"""Test infrastructure for U1DB"""
-
-import copy
-import shutil
-import socket
-import tempfile
-import threading
-
-try:
- import simplejson as json
-except ImportError:
- import json # noqa
-
-from wsgiref import simple_server
-
-from oauth import oauth
-from sqlite3 import dbapi2
-from StringIO import StringIO
-
-import testscenarios
-import testtools
-
-from u1db import (
- errors,
- Document,
- )
-from u1db.backends import (
- inmemory,
- sqlite_backend,
- )
-from u1db.remote import (
- server_state,
- )
-
-try:
- from u1db.tests import c_backend_wrapper
- c_backend_error = None
-except ImportError, e:
- c_backend_wrapper = None # noqa
- c_backend_error = e
-
-# Setting this means that failing assertions will not include this module in
-# their traceback. However testtools doesn't seem to set it, and we don't want
-# this level to be omitted, but the lower levels to be shown.
-# __unittest = 1
-
-
-class TestCase(testtools.TestCase):
-
- def createTempDir(self, prefix='u1db-tmp-'):
- """Create a temporary directory to do some work in.
-
- This directory will be scheduled for cleanup when the test ends.
- """
- tempdir = tempfile.mkdtemp(prefix=prefix)
- self.addCleanup(shutil.rmtree, tempdir)
- return tempdir
-
- def make_document(self, doc_id, doc_rev, content, has_conflicts=False):
- return self.make_document_for_test(
- self, doc_id, doc_rev, content, has_conflicts)
-
- def make_document_for_test(self, test, doc_id, doc_rev, content,
- has_conflicts):
- return make_document_for_test(
- test, doc_id, doc_rev, content, has_conflicts)
-
- def assertGetDoc(self, db, doc_id, doc_rev, content, has_conflicts):
- """Assert that the document in the database looks correct."""
- exp_doc = self.make_document(doc_id, doc_rev, content,
- has_conflicts=has_conflicts)
- self.assertEqual(exp_doc, db.get_doc(doc_id))
-
- def assertGetDocIncludeDeleted(self, db, doc_id, doc_rev, content,
- has_conflicts):
- """Assert that the document in the database looks correct."""
- exp_doc = self.make_document(doc_id, doc_rev, content,
- has_conflicts=has_conflicts)
- self.assertEqual(exp_doc, db.get_doc(doc_id, include_deleted=True))
-
- def assertGetDocConflicts(self, db, doc_id, conflicts):
- """Assert what conflicts are stored for a given doc_id.
-
- :param conflicts: A list of (doc_rev, content) pairs.
- The first item must match the first item returned from the
- database, however the rest can be returned in any order.
- """
- if conflicts:
- conflicts = [(rev, (json.loads(cont) if isinstance(cont, basestring)
- else cont)) for (rev, cont) in conflicts]
- conflicts = conflicts[:1] + sorted(conflicts[1:])
- actual = db.get_doc_conflicts(doc_id)
- if actual:
- actual = [(doc.rev, (json.loads(doc.get_json())
- if doc.get_json() is not None else None)) for doc in actual]
- actual = actual[:1] + sorted(actual[1:])
- self.assertEqual(conflicts, actual)
-
-
-def multiply_scenarios(a_scenarios, b_scenarios):
- """Create the cross-product of scenarios."""
-
- all_scenarios = []
- for a_name, a_attrs in a_scenarios:
- for b_name, b_attrs in b_scenarios:
- name = '%s,%s' % (a_name, b_name)
- attrs = dict(a_attrs)
- attrs.update(b_attrs)
- all_scenarios.append((name, attrs))
- return all_scenarios
-
-
-simple_doc = '{"key": "value"}'
-nested_doc = '{"key": "value", "sub": {"doc": "underneath"}}'
-
-
-def make_memory_database_for_test(test, replica_uid):
- return inmemory.InMemoryDatabase(replica_uid)
-
-
-def copy_memory_database_for_test(test, db):
- # DO NOT COPY OR REUSE THIS CODE OUTSIDE TESTS: COPYING U1DB DATABASES IS
- # THE WRONG THING TO DO, THE ONLY REASON WE DO SO HERE IS TO TEST THAT WE
- # CORRECTLY DETECT IT HAPPENING SO THAT WE CAN RAISE ERRORS RATHER THAN
- # CORRUPT USER DATA. USE SYNC INSTEAD, OR WE WILL SEND NINJA TO YOUR
- # HOUSE.
- new_db = inmemory.InMemoryDatabase(db._replica_uid)
- new_db._transaction_log = db._transaction_log[:]
- new_db._docs = copy.deepcopy(db._docs)
- new_db._conflicts = copy.deepcopy(db._conflicts)
- new_db._indexes = copy.deepcopy(db._indexes)
- new_db._factory = db._factory
- return new_db
-
-
-def make_sqlite_partial_expanded_for_test(test, replica_uid):
- db = sqlite_backend.SQLitePartialExpandDatabase(':memory:')
- db._set_replica_uid(replica_uid)
- return db
-
-
-def copy_sqlite_partial_expanded_for_test(test, db):
- # DO NOT COPY OR REUSE THIS CODE OUTSIDE TESTS: COPYING U1DB DATABASES IS
- # THE WRONG THING TO DO, THE ONLY REASON WE DO SO HERE IS TO TEST THAT WE
- # CORRECTLY DETECT IT HAPPENING SO THAT WE CAN RAISE ERRORS RATHER THAN
- # CORRUPT USER DATA. USE SYNC INSTEAD, OR WE WILL SEND NINJA TO YOUR
- # HOUSE.
- new_db = sqlite_backend.SQLitePartialExpandDatabase(':memory:')
- tmpfile = StringIO()
- for line in db._db_handle.iterdump():
- if not 'sqlite_sequence' in line: # work around bug in iterdump
- tmpfile.write('%s\n' % line)
- tmpfile.seek(0)
- new_db._db_handle = dbapi2.connect(':memory:')
- new_db._db_handle.cursor().executescript(tmpfile.read())
- new_db._db_handle.commit()
- new_db._set_replica_uid(db._replica_uid)
- new_db._factory = db._factory
- return new_db
-
-
-def make_document_for_test(test, doc_id, rev, content, has_conflicts=False):
- return Document(doc_id, rev, content, has_conflicts=has_conflicts)
-
-
-def make_c_database_for_test(test, replica_uid):
- if c_backend_wrapper is None:
- test.skipTest('c_backend_wrapper is not available')
- db = c_backend_wrapper.CDatabase(':memory:')
- db._set_replica_uid(replica_uid)
- return db
-
-
-def copy_c_database_for_test(test, db):
- # DO NOT COPY OR REUSE THIS CODE OUTSIDE TESTS: COPYING U1DB DATABASES IS
- # THE WRONG THING TO DO, THE ONLY REASON WE DO SO HERE IS TO TEST THAT WE
- # CORRECTLY DETECT IT HAPPENING SO THAT WE CAN RAISE ERRORS RATHER THAN
- # CORRUPT USER DATA. USE SYNC INSTEAD, OR WE WILL SEND NINJA TO YOUR
- # HOUSE.
- if c_backend_wrapper is None:
- test.skipTest('c_backend_wrapper is not available')
- new_db = db._copy(db)
- return new_db
-
-
-def make_c_document_for_test(test, doc_id, rev, content, has_conflicts=False):
- if c_backend_wrapper is None:
- test.skipTest('c_backend_wrapper is not available')
- return c_backend_wrapper.make_document(
- doc_id, rev, content, has_conflicts=has_conflicts)
-
-
-LOCAL_DATABASES_SCENARIOS = [
- ('mem', {'make_database_for_test': make_memory_database_for_test,
- 'copy_database_for_test': copy_memory_database_for_test,
- 'make_document_for_test': make_document_for_test}),
- ('sql', {'make_database_for_test':
- make_sqlite_partial_expanded_for_test,
- 'copy_database_for_test':
- copy_sqlite_partial_expanded_for_test,
- 'make_document_for_test': make_document_for_test}),
- ]
-
-
-C_DATABASE_SCENARIOS = [
- ('c', {'make_database_for_test': make_c_database_for_test,
- 'copy_database_for_test': copy_c_database_for_test,
- 'make_document_for_test': make_c_document_for_test})]
-
-
-class DatabaseBaseTests(TestCase):
-
- accept_fixed_trans_id = False # set to True assertTransactionLog
- # is happy with all trans ids = ''
-
- scenarios = LOCAL_DATABASES_SCENARIOS
-
- def create_database(self, replica_uid):
- return self.make_database_for_test(self, replica_uid)
-
- def copy_database(self, db):
- # DO NOT COPY OR REUSE THIS CODE OUTSIDE TESTS: COPYING U1DB DATABASES
- # IS THE WRONG THING TO DO, THE ONLY REASON WE DO SO HERE IS TO TEST
- # THAT WE CORRECTLY DETECT IT HAPPENING SO THAT WE CAN RAISE ERRORS
- # RATHER THAN CORRUPT USER DATA. USE SYNC INSTEAD, OR WE WILL SEND
- # NINJA TO YOUR HOUSE.
- return self.copy_database_for_test(self, db)
-
- def setUp(self):
- super(DatabaseBaseTests, self).setUp()
- self.db = self.create_database('test')
-
- def tearDown(self):
- # TODO: Add close_database parameterization
- # self.close_database(self.db)
- super(DatabaseBaseTests, self).tearDown()
-
- def assertTransactionLog(self, doc_ids, db):
- """Assert that the given docs are in the transaction log."""
- log = db._get_transaction_log()
- just_ids = []
- seen_transactions = set()
- for doc_id, transaction_id in log:
- just_ids.append(doc_id)
- self.assertIsNot(None, transaction_id,
- "Transaction id should not be None")
- if transaction_id == '' and self.accept_fixed_trans_id:
- continue
- self.assertNotEqual('', transaction_id,
- "Transaction id should be a unique string")
- self.assertTrue(transaction_id.startswith('T-'))
- self.assertNotIn(transaction_id, seen_transactions)
- seen_transactions.add(transaction_id)
- self.assertEqual(doc_ids, just_ids)
-
- def getLastTransId(self, db):
- """Return the transaction id for the last database update."""
- return self.db._get_transaction_log()[-1][-1]
-
-
-class ServerStateForTests(server_state.ServerState):
- """Used in the test suite, so we don't have to touch disk, etc."""
-
- def __init__(self):
- super(ServerStateForTests, self).__init__()
- self._dbs = {}
-
- def open_database(self, path):
- try:
- return self._dbs[path]
- except KeyError:
- raise errors.DatabaseDoesNotExist
-
- def check_database(self, path):
- # cares only about the possible exception
- self.open_database(path)
-
- def ensure_database(self, path):
- try:
- db = self.open_database(path)
- except errors.DatabaseDoesNotExist:
- db = self._create_database(path)
- return db, db._replica_uid
-
- def _copy_database(self, db):
- # DO NOT COPY OR REUSE THIS CODE OUTSIDE TESTS: COPYING U1DB DATABASES
- # IS THE WRONG THING TO DO, THE ONLY REASON WE DO SO HERE IS TO TEST
- # THAT WE CORRECTLY DETECT IT HAPPENING SO THAT WE CAN RAISE ERRORS
- # RATHER THAN CORRUPT USER DATA. USE SYNC INSTEAD, OR WE WILL SEND
- # NINJA TO YOUR HOUSE.
- new_db = copy_memory_database_for_test(None, db)
- path = db._replica_uid
- while path in self._dbs:
- path += 'copy'
- self._dbs[path] = new_db
- return new_db
-
- def _create_database(self, path):
- db = inmemory.InMemoryDatabase(path)
- self._dbs[path] = db
- return db
-
- def delete_database(self, path):
- del self._dbs[path]
-
-
-class ResponderForTests(object):
- """Responder for tests."""
- _started = False
- sent_response = False
- status = None
-
- def start_response(self, status='success', **kwargs):
- self._started = True
- self.status = status
- self.kwargs = kwargs
-
- def send_response(self, status='success', **kwargs):
- self.start_response(status, **kwargs)
- self.finish_response()
-
- def finish_response(self):
- self.sent_response = True
-
-
-class TestCaseWithServer(TestCase):
-
- @staticmethod
- def server_def():
- # hook point
- # should return (ServerClass, "shutdown method name", "url_scheme")
- class _RequestHandler(simple_server.WSGIRequestHandler):
- def log_request(*args):
- pass # suppress
-
- def make_server(host_port, application):
- assert application, "forgot to override make_app(_with_state)?"
- srv = simple_server.WSGIServer(host_port, _RequestHandler)
- # patch the value in if it's None
- if getattr(application, 'base_url', 1) is None:
- application.base_url = "http://%s:%s" % srv.server_address
- srv.set_app(application)
- return srv
-
- return make_server, "shutdown", "http"
-
- @staticmethod
- def make_app_with_state(state):
- # hook point
- return None
-
- def make_app(self):
- # potential hook point
- self.request_state = ServerStateForTests()
- return self.make_app_with_state(self.request_state)
-
- def setUp(self):
- super(TestCaseWithServer, self).setUp()
- self.server = self.server_thread = None
-
- @property
- def url_scheme(self):
- return self.server_def()[-1]
-
- def startServer(self):
- server_def = self.server_def()
- server_class, shutdown_meth, _ = server_def
- application = self.make_app()
- self.server = server_class(('127.0.0.1', 0), application)
- self.server_thread = threading.Thread(target=self.server.serve_forever,
- kwargs=dict(poll_interval=0.01))
- self.server_thread.start()
- self.addCleanup(self.server_thread.join)
- self.addCleanup(getattr(self.server, shutdown_meth))
-
- def getURL(self, path=None):
- host, port = self.server.server_address
- if path is None:
- path = ''
- return '%s://%s:%s/%s' % (self.url_scheme, host, port, path)
-
-
-def socket_pair():
- """Return a pair of TCP sockets connected to each other.
-
- Unlike socket.socketpair, this should work on Windows.
- """
- sock_pair = getattr(socket, 'socket_pair', None)
- if sock_pair:
- return sock_pair(socket.AF_INET, socket.SOCK_STREAM)
- listen_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- listen_sock.bind(('127.0.0.1', 0))
- listen_sock.listen(1)
- client_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- client_sock.connect(listen_sock.getsockname())
- server_sock, addr = listen_sock.accept()
- listen_sock.close()
- return server_sock, client_sock
-
-
-# OAuth related testing
-
-consumer1 = oauth.OAuthConsumer('K1', 'S1')
-token1 = oauth.OAuthToken('kkkk1', 'XYZ')
-consumer2 = oauth.OAuthConsumer('K2', 'S2')
-token2 = oauth.OAuthToken('kkkk2', 'ZYX')
-token3 = oauth.OAuthToken('kkkk3', 'ZYX')
-
-
-class TestingOAuthDataStore(oauth.OAuthDataStore):
- """In memory predefined OAuthDataStore for testing."""
-
- consumers = {
- consumer1.key: consumer1,
- consumer2.key: consumer2,
- }
-
- tokens = {
- token1.key: token1,
- token2.key: token2
- }
-
- def lookup_consumer(self, key):
- return self.consumers.get(key)
-
- def lookup_token(self, token_type, token_token):
- return self.tokens.get(token_token)
-
- def lookup_nonce(self, oauth_consumer, oauth_token, nonce):
- return None
-
-testingOAuthStore = TestingOAuthDataStore()
-
-sign_meth_HMAC_SHA1 = oauth.OAuthSignatureMethod_HMAC_SHA1()
-sign_meth_PLAINTEXT = oauth.OAuthSignatureMethod_PLAINTEXT()
-
-
-def load_with_scenarios(loader, standard_tests, pattern):
- """Load the tests in a given module.
-
- This just applies testscenarios.generate_scenarios to all the tests that
- are present. We do it at load time rather than at run time, because it
- plays nicer with various tools.
- """
- suite = loader.suiteClass()
- suite.addTests(testscenarios.generate_scenarios(standard_tests))
- return suite
diff --git a/src/leap/soledad/u1db/tests/c_backend_wrapper.pyx b/src/leap/soledad/u1db/tests/c_backend_wrapper.pyx
deleted file mode 100644
index 8a4b600d..00000000
--- a/src/leap/soledad/u1db/tests/c_backend_wrapper.pyx
+++ /dev/null
@@ -1,1541 +0,0 @@
-# Copyright 2011-2012 Canonical Ltd.
-#
-# This file is part of u1db.
-#
-# u1db is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License version 3
-# as published by the Free Software Foundation.
-#
-# u1db is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with u1db. If not, see <http://www.gnu.org/licenses/>.
-#
-"""A Cython wrapper around the C implementation of U1DB Database backend."""
-
-cdef extern from "Python.h":
- object PyString_FromStringAndSize(char *s, Py_ssize_t n)
- int PyString_AsStringAndSize(object o, char **buf, Py_ssize_t *length
- ) except -1
- char *PyString_AsString(object) except NULL
- char *PyString_AS_STRING(object)
- char *strdup(char *)
- void *calloc(size_t, size_t)
- void free(void *)
- ctypedef struct FILE:
- pass
- fprintf(FILE *, char *, ...)
- FILE *stderr
- size_t strlen(char *)
-
-cdef extern from "stdarg.h":
- ctypedef struct va_list:
- pass
- void va_start(va_list, void*)
- void va_start_int "va_start" (va_list, int)
- void va_end(va_list)
-
-cdef extern from "u1db/u1db.h":
- ctypedef struct u1database:
- pass
- ctypedef struct u1db_document:
- char *doc_id
- size_t doc_id_len
- char *doc_rev
- size_t doc_rev_len
- char *json
- size_t json_len
- int has_conflicts
- # Note: u1query is actually defined in u1db_internal.h, and in u1db.h it is
- # just an opaque pointer. However, older versions of Cython don't let
- # you have a forward declaration and a full declaration, so we just
- # expose the whole thing here.
- ctypedef struct u1query:
- char *index_name
- int num_fields
- char **fields
- cdef struct u1db_oauth_creds:
- int auth_kind
- char *consumer_key
- char *consumer_secret
- char *token_key
- char *token_secret
- ctypedef union u1db_creds
- ctypedef u1db_creds* const_u1db_creds_ptr "const u1db_creds *"
-
- ctypedef char* const_char_ptr "const char*"
- ctypedef int (*u1db_doc_callback)(void *context, u1db_document *doc)
- ctypedef int (*u1db_key_callback)(void *context, int num_fields,
- const_char_ptr *key)
- ctypedef int (*u1db_doc_gen_callback)(void *context,
- u1db_document *doc, int gen, const_char_ptr trans_id)
- ctypedef int (*u1db_trans_info_callback)(void *context,
- const_char_ptr doc_id, int gen, const_char_ptr trans_id)
-
- u1database * u1db_open(char *fname)
- void u1db_free(u1database **)
- int u1db_set_replica_uid(u1database *, char *replica_uid)
- int u1db_set_document_size_limit(u1database *, int limit)
- int u1db_get_replica_uid(u1database *, const_char_ptr *replica_uid)
- int u1db_create_doc_from_json(u1database *db, char *json, char *doc_id,
- u1db_document **doc)
- int u1db_delete_doc(u1database *db, u1db_document *doc)
- int u1db_get_doc(u1database *db, char *doc_id, int include_deleted,
- u1db_document **doc)
- int u1db_get_docs(u1database *db, int n_doc_ids, const_char_ptr *doc_ids,
- int check_for_conflicts, int include_deleted,
- void *context, u1db_doc_callback cb)
- int u1db_get_all_docs(u1database *db, int include_deleted, int *generation,
- void *context, u1db_doc_callback cb)
- int u1db_put_doc(u1database *db, u1db_document *doc)
- int u1db__validate_source(u1database *db, const_char_ptr replica_uid,
- int replica_gen, const_char_ptr replica_trans_id)
- int u1db__put_doc_if_newer(u1database *db, u1db_document *doc,
- int save_conflict, char *replica_uid,
- int replica_gen, char *replica_trans_id,
- int *state, int *at_gen)
- int u1db_resolve_doc(u1database *db, u1db_document *doc,
- int n_revs, const_char_ptr *revs)
- int u1db_delete_doc(u1database *db, u1db_document *doc)
- int u1db_whats_changed(u1database *db, int *gen, char **trans_id,
- void *context, u1db_trans_info_callback cb)
- int u1db__get_transaction_log(u1database *db, void *context,
- u1db_trans_info_callback cb)
- int u1db_get_doc_conflicts(u1database *db, char *doc_id, void *context,
- u1db_doc_callback cb)
- int u1db_sync(u1database *db, const_char_ptr url,
- const_u1db_creds_ptr creds, int *local_gen) nogil
- int u1db_create_index_list(u1database *db, char *index_name,
- int n_expressions, const_char_ptr *expressions)
- int u1db_create_index(u1database *db, char *index_name, int n_expressions,
- ...)
- int u1db_get_from_index_list(u1database *db, u1query *query, void *context,
- u1db_doc_callback cb, int n_values,
- const_char_ptr *values)
- int u1db_get_from_index(u1database *db, u1query *query, void *context,
- u1db_doc_callback cb, int n_values, char *val0,
- ...)
- int u1db_get_range_from_index(u1database *db, u1query *query,
- void *context, u1db_doc_callback cb,
- int n_values, const_char_ptr *start_values,
- const_char_ptr *end_values)
- int u1db_delete_index(u1database *db, char *index_name)
- int u1db_list_indexes(u1database *db, void *context,
- int (*cb)(void *context, const_char_ptr index_name,
- int n_expressions, const_char_ptr *expressions))
- int u1db_get_index_keys(u1database *db, char *index_name, void *context,
- u1db_key_callback cb)
- int u1db_simple_lookup1(u1database *db, char *index_name, char *val1,
- void *context, u1db_doc_callback cb)
- int u1db_query_init(u1database *db, char *index_name, u1query **query)
- void u1db_free_query(u1query **query)
-
- int U1DB_OK
- int U1DB_INVALID_PARAMETER
- int U1DB_REVISION_CONFLICT
- int U1DB_INVALID_DOC_ID
- int U1DB_DOCUMENT_ALREADY_DELETED
- int U1DB_DOCUMENT_DOES_NOT_EXIST
- int U1DB_NOT_IMPLEMENTED
- int U1DB_INVALID_JSON
- int U1DB_DOCUMENT_TOO_BIG
- int U1DB_USER_QUOTA_EXCEEDED
- int U1DB_INVALID_VALUE_FOR_INDEX
- int U1DB_INVALID_FIELD_SPECIFIER
- int U1DB_INVALID_GLOBBING
- int U1DB_BROKEN_SYNC_STREAM
- int U1DB_DUPLICATE_INDEX_NAME
- int U1DB_INDEX_DOES_NOT_EXIST
- int U1DB_INVALID_GENERATION
- int U1DB_INVALID_TRANSACTION_ID
- int U1DB_INVALID_TRANSFORMATION_FUNCTION
- int U1DB_UNKNOWN_OPERATION
- int U1DB_INTERNAL_ERROR
- int U1DB_TARGET_UNAVAILABLE
-
- int U1DB_INSERTED
- int U1DB_SUPERSEDED
- int U1DB_CONVERGED
- int U1DB_CONFLICTED
-
- int U1DB_OAUTH_AUTH
-
- void u1db_free_doc(u1db_document **doc)
- int u1db_doc_set_json(u1db_document *doc, char *json)
- int u1db_doc_get_size(u1db_document *doc)
-
-
-cdef extern from "u1db/u1db_internal.h":
- ctypedef struct u1db_row:
- u1db_row *next
- int num_columns
- int *column_sizes
- unsigned char **columns
-
- ctypedef struct u1db_table:
- int status
- u1db_row *first_row
-
- ctypedef struct u1db_record:
- u1db_record *next
- char *doc_id
- char *doc_rev
- char *doc
-
- ctypedef struct u1db_sync_exchange:
- int target_gen
- int num_doc_ids
- char **doc_ids_to_return
- int *gen_for_doc_ids
- const_char_ptr *trans_ids_for_doc_ids
-
- ctypedef int (*u1db__trace_callback)(void *context, const_char_ptr state)
- ctypedef struct u1db_sync_target:
- int (*get_sync_info)(u1db_sync_target *st, char *source_replica_uid,
- const_char_ptr *st_replica_uid, int *st_gen,
- char **st_trans_id, int *source_gen,
- char **source_trans_id) nogil
- int (*record_sync_info)(u1db_sync_target *st,
- char *source_replica_uid, int source_gen, char *trans_id) nogil
- int (*sync_exchange)(u1db_sync_target *st,
- char *source_replica_uid, int n_docs,
- u1db_document **docs, int *generations,
- const_char_ptr *trans_ids,
- int *target_gen, char **target_trans_id,
- void *context, u1db_doc_gen_callback cb,
- void *ensure_callback) nogil
- int (*sync_exchange_doc_ids)(u1db_sync_target *st,
- u1database *source_db, int n_doc_ids,
- const_char_ptr *doc_ids, int *generations,
- const_char_ptr *trans_ids,
- int *target_gen, char **target_trans_id,
- void *context,
- u1db_doc_gen_callback cb,
- void *ensure_callback) nogil
- int (*get_sync_exchange)(u1db_sync_target *st,
- char *source_replica_uid,
- int last_known_source_gen,
- u1db_sync_exchange **exchange) nogil
- void (*finalize_sync_exchange)(u1db_sync_target *st,
- u1db_sync_exchange **exchange) nogil
- int (*_set_trace_hook)(u1db_sync_target *st,
- void *context, u1db__trace_callback cb) nogil
-
-
- void u1db__set_zero_delays()
- int u1db__get_generation(u1database *, int *db_rev)
- int u1db__get_document_size_limit(u1database *, int *limit)
- int u1db__get_generation_info(u1database *, int *db_rev, char **trans_id)
- int u1db__get_trans_id_for_gen(u1database *, int db_rev, char **trans_id)
- int u1db_validate_gen_and_trans_id(u1database *, int db_rev,
- const_char_ptr trans_id)
- char *u1db__allocate_doc_id(u1database *)
- int u1db__sql_close(u1database *)
- u1database *u1db__copy(u1database *)
- int u1db__sql_is_open(u1database *)
- u1db_table *u1db__sql_run(u1database *, char *sql, size_t n)
- void u1db__free_table(u1db_table **table)
- u1db_record *u1db__create_record(char *doc_id, char *doc_rev, char *doc)
- void u1db__free_records(u1db_record **)
-
- int u1db__allocate_document(char *doc_id, char *revision, char *content,
- int has_conflicts, u1db_document **result)
- int u1db__generate_hex_uuid(char *)
-
- int u1db__get_replica_gen_and_trans_id(u1database *db, char *replica_uid,
- int *generation, char **trans_id)
- int u1db__set_replica_gen_and_trans_id(u1database *db, char *replica_uid,
- int generation, char *trans_id)
- int u1db__sync_get_machine_info(u1database *db, char *other_replica_uid,
- int *other_db_rev, char **my_replica_uid,
- int *my_db_rev)
- int u1db__sync_record_machine_info(u1database *db, char *replica_uid,
- int db_rev)
- int u1db__sync_exchange_seen_ids(u1db_sync_exchange *se, int *n_ids,
- const_char_ptr **doc_ids)
- int u1db__format_query(int n_fields, const_char_ptr *values, char **buf,
- int *wildcard)
- int u1db__get_sync_target(u1database *db, u1db_sync_target **sync_target)
- int u1db__free_sync_target(u1db_sync_target **sync_target)
- int u1db__sync_db_to_target(u1database *db, u1db_sync_target *target,
- int *local_gen_before_sync) nogil
-
- int u1db__sync_exchange_insert_doc_from_source(u1db_sync_exchange *se,
- u1db_document *doc, int source_gen, const_char_ptr trans_id)
- int u1db__sync_exchange_find_doc_ids_to_return(u1db_sync_exchange *se)
- int u1db__sync_exchange_return_docs(u1db_sync_exchange *se, void *context,
- int (*cb)(void *context,
- u1db_document *doc, int gen,
- const_char_ptr trans_id))
- int u1db__create_http_sync_target(char *url, u1db_sync_target **target)
- int u1db__create_oauth_http_sync_target(char *url,
- char *consumer_key, char *consumer_secret,
- char *token_key, char *token_secret,
- u1db_sync_target **target)
-
-cdef extern from "u1db/u1db_http_internal.h":
- int u1db__format_sync_url(u1db_sync_target *st,
- const_char_ptr source_replica_uid, char **sync_url)
- int u1db__get_oauth_authorization(u1db_sync_target *st,
- char *http_method, char *url,
- char **oauth_authorization)
-
-
-cdef extern from "u1db/u1db_vectorclock.h":
- ctypedef struct u1db_vectorclock_item:
- char *replica_uid
- int generation
-
- ctypedef struct u1db_vectorclock:
- int num_items
- u1db_vectorclock_item *items
-
- u1db_vectorclock *u1db__vectorclock_from_str(char *s)
- void u1db__free_vectorclock(u1db_vectorclock **clock)
- int u1db__vectorclock_increment(u1db_vectorclock *clock, char *replica_uid)
- int u1db__vectorclock_maximize(u1db_vectorclock *clock,
- u1db_vectorclock *other)
- int u1db__vectorclock_as_str(u1db_vectorclock *clock, char **result)
- int u1db__vectorclock_is_newer(u1db_vectorclock *maybe_newer,
- u1db_vectorclock *older)
-
-from u1db import errors
-from sqlite3 import dbapi2
-
-
-cdef int _append_trans_info_to_list(void *context, const_char_ptr doc_id,
- int generation,
- const_char_ptr trans_id) with gil:
- a_list = <object>(context)
- doc = doc_id
- a_list.append((doc, generation, trans_id))
- return 0
-
-
-cdef int _append_doc_to_list(void *context, u1db_document *doc) with gil:
- a_list = <object>context
- pydoc = CDocument()
- pydoc._doc = doc
- a_list.append(pydoc)
- return 0
-
-cdef int _append_key_to_list(void *context, int num_fields,
- const_char_ptr *key) with gil:
- a_list = <object>(context)
- field_list = []
- for i from 0 <= i < num_fields:
- field = key[i]
- field_list.append(field.decode('utf-8'))
- a_list.append(tuple(field_list))
- return 0
-
-cdef _list_to_array(lst, const_char_ptr **res, int *count):
- cdef const_char_ptr *tmp
- count[0] = len(lst)
- tmp = <const_char_ptr*>calloc(sizeof(char*), count[0])
- for idx, x in enumerate(lst):
- tmp[idx] = x
- res[0] = tmp
-
-cdef _list_to_str_array(lst, const_char_ptr **res, int *count):
- cdef const_char_ptr *tmp
- count[0] = len(lst)
- tmp = <const_char_ptr*>calloc(sizeof(char*), count[0])
- new_objs = []
- for idx, x in enumerate(lst):
- if isinstance(x, unicode):
- x = x.encode('utf-8')
- new_objs.append(x)
- tmp[idx] = x
- res[0] = tmp
- return new_objs
-
-
-cdef int _append_index_definition_to_list(void *context,
- const_char_ptr index_name, int n_expressions,
- const_char_ptr *expressions) with gil:
- cdef int i
-
- a_list = <object>(context)
- exp_list = []
- for i from 0 <= i < n_expressions:
- s = expressions[i]
- exp_list.append(s.decode('utf-8'))
- a_list.append((index_name, exp_list))
- return 0
-
-
-cdef int return_doc_cb_wrapper(void *context, u1db_document *doc,
- int gen, const_char_ptr trans_id) with gil:
- cdef CDocument pydoc
- user_cb = <object>context
- pydoc = CDocument()
- pydoc._doc = doc
- try:
- user_cb(pydoc, gen, trans_id)
- except Exception, e:
- # We suppress the exception here, because intermediating through the C
- # layer gets a bit crazy
- return U1DB_INVALID_PARAMETER
- return U1DB_OK
-
-
-cdef int _trace_hook(void *context, const_char_ptr state) with gil:
- if context == NULL:
- return U1DB_INVALID_PARAMETER
- ctx = <object>context
- try:
- ctx(state)
- except:
- # Note: It would be nice if we could map the Python exception into
- # something in C
- return U1DB_INTERNAL_ERROR
- return U1DB_OK
-
-
-cdef char *_ensure_str(object obj, object extra_objs) except NULL:
- """Ensure that we have the UTF-8 representation of a parameter.
-
- :param obj: A Unicode or String object.
- :param extra_objs: This should be a Python list. If we have to convert obj
- from being a Unicode object, this will hold the PyString object so that
- we know the char* lifetime will be correct.
- :return: A C pointer to the UTF-8 representation.
- """
- if isinstance(obj, unicode):
- obj = obj.encode('utf-8')
- extra_objs.append(obj)
- return PyString_AsString(obj)
-
-
def _format_query(fields):
    """Wrapper around u1db__format_query for testing.

    :param fields: A sequence (at most 10 entries) of field expressions,
        each a str or unicode object.
    :return: A two-tuple ``(query_string_or_None, wildcard_flags)`` where
        ``wildcard_flags`` is a list of ints, one per field.
    :raises ValueError: If more than 10 fields are supplied (the C-side
        wildcard buffer is a fixed-size array).
    """
    cdef int status
    cdef char *buf
    cdef int wildcard[10]
    cdef const_char_ptr *values
    cdef int n_values

    # ``wildcard`` is a fixed int[10] C array; without this guard both
    # u1db__format_query and the read-back loop below would run past the
    # end of the buffer for longer field lists.
    if len(fields) > 10:
        raise ValueError("_format_query supports at most 10 fields")
    # keep a reference to new_objs so that the pointers in values
    # remain valid for the duration of the C call.
    new_objs = _list_to_str_array(fields, &values, &n_values)
    try:
        status = u1db__format_query(n_values, values, &buf, wildcard)
    finally:
        free(<void*>values)
    handle_status("format_query", status)
    if buf == NULL:
        res = None
    else:
        # Copy the C string into a Python object before freeing it.
        res = buf
        free(buf)
    w = []
    for i in range(len(fields)):
        w.append(wildcard[i])
    return res, w
-
-
def make_document(doc_id, rev, content, has_conflicts=False):
    """Allocate a document via the C layer and wrap it in a CDocument.

    :param doc_id: Document identifier, or None for a NULL id.
    :param rev: Revision string, or None.
    :param content: JSON content string, or None.
    :param has_conflicts: Whether the document should be flagged conflicted.
    :return: A CDocument owning the newly allocated u1db_document.
    """
    cdef u1db_document *doc
    cdef char *c_content = NULL, *c_rev = NULL, *c_doc_id = NULL
    cdef int conflict

    if has_conflicts:
        conflict = 1
    else:
        conflict = 0
    # Map each optional Python string to either NULL or its char* buffer;
    # the Python arguments stay alive for the duration of the C call.
    if doc_id is None:
        c_doc_id = NULL
    else:
        c_doc_id = doc_id
    if content is None:
        c_content = NULL
    else:
        c_content = content
    if rev is None:
        c_rev = NULL
    else:
        c_rev = rev
    handle_status(
        "make_document",
        u1db__allocate_document(c_doc_id, c_rev, c_content, conflict, &doc))
    pydoc = CDocument()
    # CDocument takes ownership and frees the struct in __dealloc__.
    pydoc._doc = doc
    return pydoc
-
-
def generate_hex_uuid():
    """Return a new 32-character hex UUID generated by the C library.

    Allocates an uninitialised 32-byte Python string and lets
    u1db__generate_hex_uuid fill its buffer in place.
    """
    uuid = PyString_FromStringAndSize(NULL, 32)
    handle_status(
        "Failed to generate uuid",
        u1db__generate_hex_uuid(PyString_AS_STRING(uuid)))
    return uuid
-
-
cdef class CDocument(object):
    """A thin wrapper around the C Document struct.

    Owns the underlying ``u1db_document``: whoever assigns ``_doc`` hands
    over responsibility for freeing it, which happens in ``__dealloc__``.
    """

    cdef u1db_document *_doc

    def __init__(self):
        self._doc = NULL

    def __dealloc__(self):
        # Safe on NULL; also resets the pointer so double-free is impossible.
        u1db_free_doc(&self._doc)

    property doc_id:
        def __get__(self):
            # Length-aware copy: doc_id is not assumed NUL-terminated.
            if self._doc.doc_id == NULL:
                return None
            return PyString_FromStringAndSize(
                self._doc.doc_id, self._doc.doc_id_len)

    property rev:
        def __get__(self):
            if self._doc.doc_rev == NULL:
                return None
            return PyString_FromStringAndSize(
                self._doc.doc_rev, self._doc.doc_rev_len)

    def get_json(self):
        """Return the document content as a JSON string, or None."""
        if self._doc.json == NULL:
            return None
        return PyString_FromStringAndSize(
            self._doc.json, self._doc.json_len)

    def set_json(self, val):
        """Replace the document content with the JSON string ``val``."""
        u1db_doc_set_json(self._doc, val)

    def get_size(self):
        """Return the document's size in bytes as computed by the C layer."""
        return u1db_doc_get_size(self._doc)

    property has_conflicts:
        def __get__(self):
            if self._doc.has_conflicts:
                return True
            return False

    def __repr__(self):
        if self._doc.has_conflicts:
            extra = ', conflicted'
        else:
            extra = ''
        return '%s(%s, %s%s, %r)' % (self.__class__.__name__, self.doc_id,
                                     self.rev, extra, self.get_json())

    def __hash__(self):
        # Documents are mutable; explicitly refuse hashing.
        raise NotImplementedError(self.__hash__)

    def __richcmp__(self, other, int t):
        # Only '<' (t == 0) and '==' (t == 2) are supported; every other
        # comparison, and comparison against incompatible objects, yields
        # NotImplemented.
        try:
            if t == 0:  # Py_LT <
                return ((self.doc_id, self.rev, self.get_json())
                    < (other.doc_id, other.rev, other.get_json()))
            elif t == 2:  # Py_EQ ==
                return (self.doc_id == other.doc_id
                        and self.rev == other.rev
                        and self.get_json() == other.get_json()
                        and self.has_conflicts == other.has_conflicts)
        except AttributeError:
            # Fall through to NotImplemented
            pass

        return NotImplemented
-
-
cdef object safe_str(const_char_ptr s):
    """Convert a possibly-NULL C string to a Python str (NULL -> None)."""
    if s == NULL:
        return None
    return s
-
-
cdef class CQuery:
    """A thin wrapper around the C ``u1query`` struct.

    Created via CDatabase._query_init; owns the underlying query and
    frees it in ``__dealloc__``.
    """

    cdef u1query *_query

    def __init__(self):
        self._query = NULL

    def __dealloc__(self):
        u1db_free_query(&self._query)

    def _check(self):
        # Guard every accessor against use before _query is populated.
        if self._query == NULL:
            raise RuntimeError("No valid _query.")

    property index_name:
        def __get__(self):
            self._check()
            return safe_str(self._query.index_name)

    property num_fields:
        def __get__(self):
            self._check()
            return self._query.num_fields

    property fields:
        def __get__(self):
            cdef int i
            self._check()
            fields = []
            for i from 0 <= i < self._query.num_fields:
                fields.append(safe_str(self._query.fields[i]))
            return fields
-
-
cdef handle_status(context, int status):
    """Translate a u1db C status code into the matching Python exception.

    :param context: A short description of the operation, included in the
        message of generic failures.
    :param status: The integer status returned by the C API; U1DB_OK is a
        no-op, anything else raises.
    """
    if status == U1DB_OK:
        return
    if status == U1DB_REVISION_CONFLICT:
        raise errors.RevisionConflict()
    if status == U1DB_INVALID_DOC_ID:
        raise errors.InvalidDocId()
    if status == U1DB_DOCUMENT_ALREADY_DELETED:
        raise errors.DocumentAlreadyDeleted()
    if status == U1DB_DOCUMENT_DOES_NOT_EXIST:
        raise errors.DocumentDoesNotExist()
    if status == U1DB_INVALID_PARAMETER:
        raise RuntimeError('Bad parameters supplied')
    if status == U1DB_NOT_IMPLEMENTED:
        raise NotImplementedError("Functionality not implemented yet: %s"
                                  % (context,))
    if status == U1DB_INVALID_VALUE_FOR_INDEX:
        raise errors.InvalidValueForIndex()
    if status == U1DB_INVALID_GLOBBING:
        raise errors.InvalidGlobbing()
    if status == U1DB_INTERNAL_ERROR:
        raise errors.U1DBError("internal error")
    if status == U1DB_BROKEN_SYNC_STREAM:
        raise errors.BrokenSyncStream()
    if status == U1DB_CONFLICTED:
        raise errors.ConflictedDoc()
    if status == U1DB_DUPLICATE_INDEX_NAME:
        raise errors.IndexNameTakenError()
    if status == U1DB_INDEX_DOES_NOT_EXIST:
        raise errors.IndexDoesNotExist
    if status == U1DB_INVALID_GENERATION:
        raise errors.InvalidGeneration
    if status == U1DB_INVALID_TRANSACTION_ID:
        raise errors.InvalidTransactionId
    if status == U1DB_TARGET_UNAVAILABLE:
        raise errors.Unavailable
    if status == U1DB_INVALID_JSON:
        raise errors.InvalidJSON
    if status == U1DB_DOCUMENT_TOO_BIG:
        raise errors.DocumentTooBig
    if status == U1DB_USER_QUOTA_EXCEEDED:
        raise errors.UserQuotaExceeded
    # Several distinct parse-time statuses all map to the same Python
    # exception type.
    if status == U1DB_INVALID_TRANSFORMATION_FUNCTION:
        raise errors.IndexDefinitionParseError
    if status == U1DB_UNKNOWN_OPERATION:
        raise errors.IndexDefinitionParseError
    if status == U1DB_INVALID_FIELD_SPECIFIER:
        raise errors.IndexDefinitionParseError()
    # Unknown status: fall back to a generic error with the context string.
    raise RuntimeError('%s (status: %s)' % (context, status))
-
-
# Forward declarations so CSyncExchange can reference these extension
# types before their full definitions appear below.
cdef class CDatabase
cdef class CSyncTarget
-
cdef class CSyncExchange(object):
    """Wrapper around a C ``u1db_sync_exchange`` owned by a CSyncTarget.

    The exchange is obtained from the target in __init__ and finalized via
    the same target in __dealloc__, so the target reference is kept for the
    exchange's whole lifetime.
    """

    cdef u1db_sync_exchange *_exchange
    cdef CSyncTarget _target

    def __init__(self, CSyncTarget target, source_replica_uid, source_gen):
        self._target = target
        assert self._target._st.get_sync_exchange != NULL, \
            "get_sync_exchange is NULL?"
        handle_status("get_sync_exchange",
            self._target._st.get_sync_exchange(self._target._st,
                source_replica_uid, source_gen, &self._exchange))

    def __dealloc__(self):
        # Only finalize if the owning target is still alive and valid.
        if self._target is not None and self._target._st != NULL:
            self._target._st.finalize_sync_exchange(self._target._st,
                    &self._exchange)

    def _check(self):
        if self._exchange == NULL:
            raise RuntimeError("self._exchange is NULL")

    property target_gen:
        def __get__(self):
            self._check()
            return self._exchange.target_gen

    def insert_doc_from_source(self, CDocument doc, source_gen,
                               source_trans_id):
        """Feed one source document (with its generation) into the exchange."""
        self._check()
        handle_status("insert_doc_from_source",
            u1db__sync_exchange_insert_doc_from_source(self._exchange,
                doc._doc, source_gen, source_trans_id))

    def find_doc_ids_to_return(self):
        """Ask the C layer to compute which documents must be sent back."""
        self._check()
        handle_status("find_doc_ids_to_return",
            u1db__sync_exchange_find_doc_ids_to_return(self._exchange))

    def return_docs(self, return_doc_cb):
        """Stream the docs-to-return through the Python callback."""
        self._check()
        handle_status("return_docs",
            u1db__sync_exchange_return_docs(self._exchange,
                <void *>return_doc_cb, &return_doc_cb_wrapper))

    def get_seen_ids(self):
        """Return the list of doc ids already seen by this exchange."""
        cdef const_char_ptr *seen_ids
        cdef int i, n_ids
        self._check()
        # NOTE(review): if handle_status raises, seen_ids is presumably
        # still NULL/unallocated on the error path — confirm in the C code.
        handle_status("sync_exchange_seen_ids",
            u1db__sync_exchange_seen_ids(self._exchange, &n_ids, &seen_ids))
        res = []
        for i from 0 <= i < n_ids:
            res.append(seen_ids[i])
        if (seen_ids != NULL):
            free(<void*>seen_ids)
        return res

    def get_doc_ids_to_return(self):
        """Return [(doc_id, gen, trans_id), ...] pending for the source."""
        self._check()
        res = []
        if (self._exchange.num_doc_ids > 0
                and self._exchange.doc_ids_to_return != NULL):
            for i from 0 <= i < self._exchange.num_doc_ids:
                res.append(
                    (self._exchange.doc_ids_to_return[i],
                     self._exchange.gen_for_doc_ids[i],
                     self._exchange.trans_ids_for_doc_ids[i]))
        return res
-
-
cdef class CSyncTarget(object):
    """Thin wrapper around a C ``u1db_sync_target`` for the test suite.

    Owns the underlying sync target (freed in __dealloc__) and, when
    obtained from a CDatabase, keeps a reference to that database.
    """

    cdef u1db_sync_target *_st
    cdef CDatabase _db

    def __init__(self):
        self._db = None
        self._st = NULL
        # Tests should not wait on the library's retry/backoff delays.
        u1db__set_zero_delays()

    def __dealloc__(self):
        u1db__free_sync_target(&self._st)

    def _check(self):
        if self._st == NULL:
            raise RuntimeError("self._st is NULL")

    def get_sync_info(self, source_replica_uid):
        """Return (st_replica_uid, st_gen, st_trans_id, source_gen, trans_id).

        Releases the GIL around the (potentially network-bound) C call.
        """
        cdef const_char_ptr st_replica_uid = NULL
        cdef int st_gen = 0, source_gen = 0, status
        cdef char *trans_id = NULL
        cdef char *st_trans_id = NULL
        cdef char *c_source_replica_uid = NULL

        self._check()
        assert self._st.get_sync_info != NULL, "get_sync_info is NULL?"
        c_source_replica_uid = source_replica_uid
        with nogil:
            status = self._st.get_sync_info(self._st, c_source_replica_uid,
                &st_replica_uid, &st_gen, &st_trans_id, &source_gen, &trans_id)
        handle_status("get_sync_info", status)
        # Copy the C-allocated strings into Python objects before freeing.
        res_trans_id = None
        res_st_trans_id = None
        if trans_id != NULL:
            res_trans_id = trans_id
            free(trans_id)
        if st_trans_id != NULL:
            res_st_trans_id = st_trans_id
            free(st_trans_id)
        return (
            safe_str(st_replica_uid), st_gen, res_st_trans_id, source_gen,
            res_trans_id)

    def record_sync_info(self, source_replica_uid, source_gen, source_trans_id):
        """Record how far the target has synced with the source replica."""
        cdef int status
        cdef int c_source_gen
        cdef char *c_source_replica_uid = NULL
        cdef char *c_source_trans_id = NULL

        self._check()
        assert self._st.record_sync_info != NULL, "record_sync_info is NULL?"
        c_source_replica_uid = source_replica_uid
        c_source_gen = source_gen
        c_source_trans_id = source_trans_id
        with nogil:
            status = self._st.record_sync_info(
                self._st, c_source_replica_uid, c_source_gen,
                c_source_trans_id)
        handle_status("record_sync_info", status)

    def _get_sync_exchange(self, source_replica_uid, source_gen):
        self._check()
        return CSyncExchange(self, source_replica_uid, source_gen)

    def sync_exchange_doc_ids(self, source_db, doc_id_generations,
                              last_known_generation, last_known_trans_id,
                              return_doc_cb):
        """Run a doc-id based sync exchange against ``source_db``.

        :return: (target_gen, target_trans_id) after the exchange.
        """
        cdef const_char_ptr *doc_ids
        cdef int *generations
        cdef int num_doc_ids
        cdef int target_gen
        cdef char *target_trans_id = NULL
        cdef int status
        cdef CDatabase sdb

        self._check()
        assert self._st.sync_exchange_doc_ids != NULL, "sync_exchange_doc_ids is NULL?"
        sdb = source_db
        num_doc_ids = len(doc_id_generations)
        doc_ids = <const_char_ptr *>calloc(num_doc_ids, sizeof(char *))
        if doc_ids == NULL:
            raise MemoryError
        generations = <int *>calloc(num_doc_ids, sizeof(int))
        if generations == NULL:
            free(<void *>doc_ids)
            raise MemoryError
        trans_ids = <const_char_ptr*>calloc(num_doc_ids, sizeof(char *))
        if trans_ids == NULL:
            # Bug fix: this failure path used to leak doc_ids and
            # generations — the try/finally below only covers allocations
            # that all succeeded.
            free(<void *>doc_ids)
            free(generations)
            raise MemoryError
        res_trans_id = ''
        try:
            for i, (doc_id, gen, trans_id) in enumerate(doc_id_generations):
                doc_ids[i] = PyString_AsString(doc_id)
                generations[i] = gen
                trans_ids[i] = trans_id
            target_gen = last_known_generation
            if last_known_trans_id is not None:
                target_trans_id = last_known_trans_id
            with nogil:
                status = self._st.sync_exchange_doc_ids(self._st, sdb._db,
                    num_doc_ids, doc_ids, generations, trans_ids,
                    &target_gen, &target_trans_id,
                    <void*>return_doc_cb, return_doc_cb_wrapper, NULL)
            handle_status("sync_exchange_doc_ids", status)
            if target_trans_id != NULL:
                res_trans_id = target_trans_id
        finally:
            if target_trans_id != NULL:
                free(target_trans_id)
            if doc_ids != NULL:
                free(<void *>doc_ids)
            if generations != NULL:
                free(generations)
            if trans_ids != NULL:
                free(trans_ids)
        return target_gen, res_trans_id

    def sync_exchange(self, docs_by_generations, source_replica_uid,
                      last_known_generation, last_known_trans_id,
                      return_doc_cb, ensure_callback=None):
        """Run a full-document sync exchange.

        :param docs_by_generations: Sequence of (CDocument, gen, trans_id).
        :return: (target_gen, target_trans_id) after the exchange.
        """
        cdef CDocument cur_doc
        cdef u1db_document **docs = NULL
        cdef int *generations = NULL
        cdef const_char_ptr *trans_ids = NULL
        cdef char *target_trans_id = NULL
        cdef char *c_source_replica_uid = NULL
        cdef int i, count, status, target_gen
        assert ensure_callback is None  # interface difference

        self._check()
        assert self._st.sync_exchange != NULL, "sync_exchange is NULL?"
        count = len(docs_by_generations)
        res_trans_id = ''
        try:
            docs = <u1db_document **>calloc(count, sizeof(u1db_document*))
            if docs == NULL:
                raise MemoryError
            generations = <int*>calloc(count, sizeof(int))
            if generations == NULL:
                raise MemoryError
            trans_ids = <const_char_ptr*>calloc(count, sizeof(char*))
            if trans_ids == NULL:
                raise MemoryError
            for i from 0 <= i < count:
                # The CDocuments retain ownership; we only borrow pointers.
                cur_doc = docs_by_generations[i][0]
                generations[i] = docs_by_generations[i][1]
                trans_ids[i] = docs_by_generations[i][2]
                docs[i] = cur_doc._doc
            target_gen = last_known_generation
            if last_known_trans_id is not None:
                target_trans_id = last_known_trans_id
            c_source_replica_uid = source_replica_uid
            with nogil:
                status = self._st.sync_exchange(
                    self._st, c_source_replica_uid, count, docs, generations,
                    trans_ids, &target_gen, &target_trans_id,
                    <void *>return_doc_cb, return_doc_cb_wrapper, NULL)
            handle_status("sync_exchange", status)
        finally:
            # All allocations are guarded inside the try, so a partial
            # failure cleans up whatever was actually allocated.
            if docs != NULL:
                free(docs)
            if generations != NULL:
                free(generations)
            if trans_ids != NULL:
                free(trans_ids)
            if target_trans_id != NULL:
                res_trans_id = target_trans_id
                free(target_trans_id)
        return target_gen, res_trans_id

    def _set_trace_hook(self, cb):
        """Install a Python callable to receive sync-state trace events."""
        self._check()
        assert self._st._set_trace_hook != NULL, "_set_trace_hook is NULL?"
        handle_status("_set_trace_hook",
            self._st._set_trace_hook(self._st, <void*>cb, _trace_hook))

    _set_trace_hook_shallow = _set_trace_hook
-
-
cdef class CDatabase(object):
    """A thin wrapper/shim to interact with the C implementation.

    Functionality should not be written here. It is only provided as a way to
    expose the C API to the python test suite.
    """

    cdef public object _filename
    cdef u1database *_db
    cdef public object _supports_indexes

    def __init__(self, filename):
        self._supports_indexes = False
        self._filename = filename
        self._db = u1db_open(self._filename)

    def __dealloc__(self):
        u1db_free(&self._db)

    def close(self):
        """Close the underlying SQL handle; returns the C status."""
        return u1db__sql_close(self._db)

    def _copy(self, db):
        # DO NOT COPY OR REUSE THIS CODE OUTSIDE TESTS: COPYING U1DB DATABASES IS
        # THE WRONG THING TO DO, THE ONLY REASON WE DO SO HERE IS TO TEST THAT WE
        # CORRECTLY DETECT IT HAPPENING SO THAT WE CAN RAISE ERRORS RATHER THAN
        # CORRUPT USER DATA. USE SYNC INSTEAD, OR WE WILL SEND NINJA TO YOUR
        # HOUSE.
        new_db = CDatabase(':memory:')
        u1db_free(&new_db._db)
        new_db._db = u1db__copy(self._db)
        return new_db

    def _sql_is_open(self):
        # NOTE(review): a NULL database reports "open" here — looks
        # intentional for the tests, but worth confirming.
        if self._db == NULL:
            return True
        return u1db__sql_is_open(self._db)

    property _replica_uid:
        def __get__(self):
            cdef const_char_ptr val
            cdef int status
            status = u1db_get_replica_uid(self._db, &val)
            if status != 0:
                # On failure the C layer may use val to carry an error string.
                if val != NULL:
                    err = str(val)
                else:
                    err = "<unknown>"
                raise RuntimeError("Failed to get_replica_uid: %d %s"
                                   % (status, err))
            if val == NULL:
                return None
            return str(val)

    def _set_replica_uid(self, replica_uid):
        cdef int status
        status = u1db_set_replica_uid(self._db, replica_uid)
        if status != 0:
            raise RuntimeError('replica_uid could not be set to %s, error: %d'
                               % (replica_uid, status))

    property document_size_limit:
        def __get__(self):
            cdef int limit
            handle_status("document_size_limit",
                u1db__get_document_size_limit(self._db, &limit))
            return limit

    def set_document_size_limit(self, limit):
        cdef int status
        status = u1db_set_document_size_limit(self._db, limit)
        if status != 0:
            # Bug fix: the '%' operator was missing, so RuntimeError used to
            # receive the unformatted template and the args tuple separately.
            raise RuntimeError(
                "document_size_limit could not be set to %d, error: %d"
                % (limit, status))

    def _allocate_doc_id(self):
        """Ask the C layer for a fresh document id."""
        cdef char *val
        val = u1db__allocate_doc_id(self._db)
        if val == NULL:
            raise RuntimeError("Failed to allocate document id")
        # Copy into a Python string before freeing the C allocation.
        s = str(val)
        free(val)
        return s

    def _run_sql(self, sql):
        """Run raw SQL and return the result rows as a list of tuples."""
        cdef u1db_table *tbl
        cdef u1db_row *cur_row
        cdef size_t n
        cdef int i

        if self._db == NULL:
            raise RuntimeError("called _run_sql with a NULL pointer.")
        tbl = u1db__sql_run(self._db, sql, len(sql))
        if tbl == NULL:
            raise MemoryError("Failed to allocate table memory.")
        try:
            if tbl.status != 0:
                raise RuntimeError("Status was not 0: %d" % (tbl.status,))
            # Now convert the table into python
            res = []
            cur_row = tbl.first_row
            while cur_row != NULL:
                row = []
                for i from 0 <= i < cur_row.num_columns:
                    # Length-aware copy: columns may contain NUL bytes.
                    row.append(PyString_FromStringAndSize(
                        <char*>(cur_row.columns[i]), cur_row.column_sizes[i]))
                res.append(tuple(row))
                cur_row = cur_row.next
            return res
        finally:
            u1db__free_table(&tbl)

    def create_doc_from_json(self, json, doc_id=None):
        """Create a document from a JSON string, returning a CDocument."""
        cdef u1db_document *doc = NULL
        cdef char *c_doc_id

        if doc_id is None:
            c_doc_id = NULL
        else:
            c_doc_id = doc_id
        handle_status('Failed to create_doc',
            u1db_create_doc_from_json(self._db, json, c_doc_id, &doc))
        pydoc = CDocument()
        pydoc._doc = doc
        return pydoc

    def put_doc(self, CDocument doc):
        """Store ``doc`` and return its (updated) revision."""
        handle_status("Failed to put_doc",
            u1db_put_doc(self._db, doc._doc))
        return doc.rev

    def _validate_source(self, replica_uid, replica_gen, replica_trans_id):
        cdef const_char_ptr c_uid, c_trans_id
        cdef int c_gen = 0

        c_uid = replica_uid
        c_trans_id = replica_trans_id
        c_gen = replica_gen
        handle_status(
            "invalid generation or transaction id",
            u1db__validate_source(self._db, c_uid, c_gen, c_trans_id))

    def _put_doc_if_newer(self, CDocument doc, save_conflict, replica_uid=None,
                          replica_gen=None, replica_trans_id=None):
        """Insert ``doc`` only if newer; return (state_name, at_gen)."""
        cdef char *c_uid, *c_trans_id
        cdef int gen, state = 0, at_gen = -1

        if replica_uid is None:
            c_uid = NULL
        else:
            c_uid = replica_uid
        if replica_trans_id is None:
            c_trans_id = NULL
        else:
            c_trans_id = replica_trans_id
        if replica_gen is None:
            gen = 0
        else:
            gen = replica_gen
        handle_status("Failed to _put_doc_if_newer",
            u1db__put_doc_if_newer(self._db, doc._doc, save_conflict,
                c_uid, gen, c_trans_id, &state, &at_gen))
        # Map the C state constants back to the string names the Python
        # test suite expects.
        if state == U1DB_INSERTED:
            return 'inserted', at_gen
        elif state == U1DB_SUPERSEDED:
            return 'superseded', at_gen
        elif state == U1DB_CONVERGED:
            return 'converged', at_gen
        elif state == U1DB_CONFLICTED:
            return 'conflicted', at_gen
        else:
            raise RuntimeError("Unknown _put_doc_if_newer state: %d" % (state,))

    def get_doc(self, doc_id, include_deleted=False):
        """Fetch one document (or None if it does not exist)."""
        cdef u1db_document *doc = NULL
        deleted = 1 if include_deleted else 0
        handle_status("get_doc failed",
            u1db_get_doc(self._db, doc_id, deleted, &doc))
        if doc == NULL:
            return None
        pydoc = CDocument()
        pydoc._doc = doc
        return pydoc

    def get_docs(self, doc_ids, check_for_conflicts=True,
                 include_deleted=False):
        """Fetch multiple documents by id, in order."""
        cdef int n_doc_ids, conflicts
        cdef const_char_ptr *c_doc_ids

        _list_to_array(doc_ids, &c_doc_ids, &n_doc_ids)
        deleted = 1 if include_deleted else 0
        conflicts = 1 if check_for_conflicts else 0
        a_list = []
        handle_status("get_docs",
            u1db_get_docs(self._db, n_doc_ids, c_doc_ids,
                conflicts, deleted, <void*>a_list, _append_doc_to_list))
        free(<void*>c_doc_ids)
        return a_list

    def get_all_docs(self, include_deleted=False):
        """Return (generation, [CDocument, ...]) for the whole database."""
        cdef int c_generation

        a_list = []
        deleted = 1 if include_deleted else 0
        generation = 0
        c_generation = generation
        handle_status(
            "get_all_docs", u1db_get_all_docs(
                self._db, deleted, &c_generation, <void*>a_list,
                _append_doc_to_list))
        return (c_generation, a_list)

    def resolve_doc(self, CDocument doc, conflicted_doc_revs):
        """Resolve conflicts on ``doc`` against the given revisions."""
        cdef const_char_ptr *revs
        cdef int n_revs

        _list_to_array(conflicted_doc_revs, &revs, &n_revs)
        handle_status("resolve_doc",
            u1db_resolve_doc(self._db, doc._doc, n_revs, revs))
        free(<void*>revs)

    def get_doc_conflicts(self, doc_id):
        """Return the list of conflicting versions of a document."""
        conflict_docs = []
        handle_status("get_doc_conflicts",
            u1db_get_doc_conflicts(self._db, doc_id, <void*>conflict_docs,
                _append_doc_to_list))
        return conflict_docs

    def delete_doc(self, CDocument doc):
        handle_status(
            "Failed to delete %s" % (doc,),
            u1db_delete_doc(self._db, doc._doc))

    def whats_changed(self, generation=0):
        """Return (new_generation, trans_id, [(doc_id, gen, trans_id), ...])."""
        cdef int c_generation
        cdef int status
        cdef char *trans_id = NULL

        a_list = []
        c_generation = generation
        res_trans_id = ''
        status = u1db_whats_changed(self._db, &c_generation, &trans_id,
            <void*>a_list, _append_trans_info_to_list)
        try:
            handle_status("whats_changed", status)
        finally:
            # Free the C string whether or not handle_status raised.
            if trans_id != NULL:
                res_trans_id = trans_id
                free(trans_id)
        return c_generation, res_trans_id, a_list

    def _get_transaction_log(self):
        a_list = []
        handle_status("_get_transaction_log",
            u1db__get_transaction_log(self._db, <void*>a_list,
                _append_trans_info_to_list))
        # The callback yields (doc_id, gen, trans_id); the generation is
        # dropped to match the Python API.
        return [(doc_id, trans_id) for doc_id, gen, trans_id in a_list]

    def _get_generation(self):
        cdef int generation
        handle_status("get_generation",
            u1db__get_generation(self._db, &generation))
        return generation

    def _get_generation_info(self):
        """Return (generation, trans_id) for the database head."""
        cdef int generation
        cdef char *trans_id
        handle_status("get_generation_info",
            u1db__get_generation_info(self._db, &generation, &trans_id))
        raw_trans_id = None
        if trans_id != NULL:
            raw_trans_id = trans_id
            free(trans_id)
        return generation, raw_trans_id

    def validate_gen_and_trans_id(self, generation, trans_id):
        handle_status(
            "validate_gen_and_trans_id",
            u1db_validate_gen_and_trans_id(self._db, generation, trans_id))

    def _get_trans_id_for_gen(self, generation):
        cdef char *trans_id = NULL

        handle_status(
            "_get_trans_id_for_gen",
            u1db__get_trans_id_for_gen(self._db, generation, &trans_id))
        raw_trans_id = None
        if trans_id != NULL:
            raw_trans_id = trans_id
            free(trans_id)
        return raw_trans_id

    def _get_replica_gen_and_trans_id(self, replica_uid):
        cdef int generation, status
        cdef char *trans_id = NULL

        status = u1db__get_replica_gen_and_trans_id(
            self._db, replica_uid, &generation, &trans_id)
        handle_status("_get_replica_gen_and_trans_id", status)
        raw_trans_id = None
        if trans_id != NULL:
            raw_trans_id = trans_id
            free(trans_id)
        return generation, raw_trans_id

    def _set_replica_gen_and_trans_id(self, replica_uid, generation, trans_id):
        handle_status("_set_replica_gen_and_trans_id",
            u1db__set_replica_gen_and_trans_id(
                self._db, replica_uid, generation, trans_id))

    def create_index_list(self, index_name, index_expressions):
        """Create an index from a list of expressions (list-based variant)."""
        cdef const_char_ptr *expressions
        cdef int n_expressions

        # keep a reference to new_objs so that the pointers in expressions
        # remain valid.
        new_objs = _list_to_str_array(
            index_expressions, &expressions, &n_expressions)
        try:
            status = u1db_create_index_list(
                self._db, index_name, n_expressions, expressions)
        finally:
            free(<void*>expressions)
        handle_status("create_index", status)

    def create_index(self, index_name, *index_expressions):
        """Create an index via the varargs C entry point (up to 4 fields)."""
        extra = []
        # u1db_create_index is a C varargs function, so each arity is
        # spelled out explicitly; more than 4 expressions is unsupported.
        if len(index_expressions) == 0:
            status = u1db_create_index(self._db, index_name, 0, NULL)
        elif len(index_expressions) == 1:
            status = u1db_create_index(
                self._db, index_name, 1,
                _ensure_str(index_expressions[0], extra))
        elif len(index_expressions) == 2:
            status = u1db_create_index(
                self._db, index_name, 2,
                _ensure_str(index_expressions[0], extra),
                _ensure_str(index_expressions[1], extra))
        elif len(index_expressions) == 3:
            status = u1db_create_index(
                self._db, index_name, 3,
                _ensure_str(index_expressions[0], extra),
                _ensure_str(index_expressions[1], extra),
                _ensure_str(index_expressions[2], extra))
        elif len(index_expressions) == 4:
            status = u1db_create_index(
                self._db, index_name, 4,
                _ensure_str(index_expressions[0], extra),
                _ensure_str(index_expressions[1], extra),
                _ensure_str(index_expressions[2], extra),
                _ensure_str(index_expressions[3], extra))
        else:
            status = U1DB_NOT_IMPLEMENTED
        handle_status("create_index", status)

    def sync(self, url, creds=None):
        """Sync this database against a remote URL; return the local gen.

        :param creds: Optional {'oauth': {...}} credentials dict.
        """
        cdef const_char_ptr c_url
        cdef int local_gen = 0
        cdef u1db_oauth_creds _oauth_creds
        cdef u1db_creds *_creds = NULL
        c_url = url
        if creds is not None:
            _oauth_creds.auth_kind = U1DB_OAUTH_AUTH
            _oauth_creds.consumer_key = creds['oauth']['consumer_key']
            _oauth_creds.consumer_secret = creds['oauth']['consumer_secret']
            _oauth_creds.token_key = creds['oauth']['token_key']
            _oauth_creds.token_secret = creds['oauth']['token_secret']
            _creds = <u1db_creds *>&_oauth_creds
        with nogil:
            status = u1db_sync(self._db, c_url, _creds, &local_gen)
        handle_status("sync", status)
        return local_gen

    def list_indexes(self):
        """Return [(index_name, [expression, ...]), ...]."""
        a_list = []
        handle_status("list_indexes",
            u1db_list_indexes(self._db, <void *>a_list,
                _append_index_definition_to_list))
        return a_list

    def delete_index(self, index_name):
        handle_status("delete_index",
            u1db_delete_index(self._db, index_name))

    def get_from_index_list(self, index_name, key_values):
        """Query an index with a list of key values (list-based variant)."""
        cdef const_char_ptr *values
        cdef int n_values
        cdef CQuery query

        query = self._query_init(index_name)
        res = []
        # keep a reference to new_objs so that the pointers in values
        # remain valid.
        new_objs = _list_to_str_array(key_values, &values, &n_values)
        try:
            handle_status(
                "get_from_index", u1db_get_from_index_list(
                    self._db, query._query, <void*>res, _append_doc_to_list,
                    n_values, values))
        finally:
            free(<void*>values)
        return res

    def get_from_index(self, index_name, *key_values):
        """Query an index via the varargs C entry point (up to 4 keys)."""
        cdef CQuery query
        cdef int status

        extra = []
        query = self._query_init(index_name)
        res = []
        status = U1DB_OK
        if len(key_values) == 0:
            status = u1db_get_from_index(self._db, query._query,
                <void*>res, _append_doc_to_list, 0, NULL)
        elif len(key_values) == 1:
            status = u1db_get_from_index(self._db, query._query,
                <void*>res, _append_doc_to_list, 1,
                _ensure_str(key_values[0], extra))
        elif len(key_values) == 2:
            status = u1db_get_from_index(self._db, query._query,
                <void*>res, _append_doc_to_list, 2,
                _ensure_str(key_values[0], extra),
                _ensure_str(key_values[1], extra))
        elif len(key_values) == 3:
            status = u1db_get_from_index(self._db, query._query,
                <void*>res, _append_doc_to_list, 3,
                _ensure_str(key_values[0], extra),
                _ensure_str(key_values[1], extra),
                _ensure_str(key_values[2], extra))
        elif len(key_values) == 4:
            status = u1db_get_from_index(self._db, query._query,
                <void*>res, _append_doc_to_list, 4,
                _ensure_str(key_values[0], extra),
                _ensure_str(key_values[1], extra),
                _ensure_str(key_values[2], extra),
                _ensure_str(key_values[3], extra))
        else:
            status = U1DB_NOT_IMPLEMENTED
        handle_status("get_from_index", status)
        return res

    def get_range_from_index(self, index_name, start_value=None,
                             end_value=None):
        """Query an index for all keys between start_value and end_value."""
        cdef CQuery query
        cdef const_char_ptr *start_values
        cdef int n_values
        cdef const_char_ptr *end_values

        if start_value is not None:
            if isinstance(start_value, basestring):
                start_value = (start_value,)
            new_objs_1 = _list_to_str_array(
                start_value, &start_values, &n_values)
        else:
            n_values = 0
            start_values = NULL
        if end_value is not None:
            if isinstance(end_value, basestring):
                end_value = (end_value,)
            # NOTE(review): n_values is overwritten here; the C API
            # presumably expects start and end to have the same length.
            new_objs_2 = _list_to_str_array(
                end_value, &end_values, &n_values)
        else:
            end_values = NULL
        query = self._query_init(index_name)
        res = []
        try:
            handle_status("get_range_from_index",
                u1db_get_range_from_index(
                    self._db, query._query, <void*>res, _append_doc_to_list,
                    n_values, start_values, end_values))
        finally:
            if start_values != NULL:
                free(<void*>start_values)
            if end_values != NULL:
                free(<void*>end_values)
        return res

    def get_index_keys(self, index_name):
        """Return the list of distinct keys stored in an index."""
        cdef int status
        keys = []
        status = U1DB_OK
        status = u1db_get_index_keys(
            self._db, index_name, <void*>keys, _append_key_to_list)
        handle_status("get_index_keys", status)
        return keys

    def _query_init(self, index_name):
        cdef CQuery query
        query = CQuery()
        handle_status("query_init",
            u1db_query_init(self._db, index_name, &query._query))
        return query

    def get_sync_target(self):
        """Return a CSyncTarget bound to this database."""
        cdef CSyncTarget target
        target = CSyncTarget()
        # Keep the database alive for as long as the target exists.
        target._db = self
        handle_status("get_sync_target",
            u1db__get_sync_target(target._db._db, &target._st))
        return target
-
-
cdef class VectorClockRev:
    """Wrapper around the C ``u1db_vectorclock`` revision structure."""

    cdef u1db_vectorclock *_clock

    def __init__(self, s):
        # A None source yields a valid-but-empty clock rather than NULL args.
        if s is None:
            self._clock = u1db__vectorclock_from_str(NULL)
        else:
            self._clock = u1db__vectorclock_from_str(s)

    def __dealloc__(self):
        u1db__free_vectorclock(&self._clock)

    def __repr__(self):
        cdef int status
        cdef char *res
        if self._clock == NULL:
            return '%s(None)' % (self.__class__.__name__,)
        status = u1db__vectorclock_as_str(self._clock, &res)
        if status != U1DB_OK:
            # Bug fix: the format string has two placeholders but was only
            # given ``status``, which made this path raise TypeError
            # instead of returning a repr.
            return '%s(<failure: %d>)' % (self.__class__.__name__, status)
        if res == NULL:
            val = '%s(NULL)' % (self.__class__.__name__,)
        else:
            val = '%s(%s)' % (self.__class__.__name__, res)
            free(res)
        return val

    def as_dict(self):
        """Return {replica_uid: generation} for every clock entry, or None."""
        cdef u1db_vectorclock *cur
        cdef int i
        cdef int gen
        if self._clock == NULL:
            return None
        res = {}
        for i from 0 <= i < self._clock.num_items:
            gen = self._clock.items[i].generation
            res[self._clock.items[i].replica_uid] = gen
        return res

    def as_str(self):
        """Serialize the clock to its string form (None if the C layer
        produced no string)."""
        cdef int status
        cdef char *res

        status = u1db__vectorclock_as_str(self._clock, &res)
        if status != U1DB_OK:
            raise RuntimeError("Failed to VectorClockRev.as_str(): %d" % (status,))
        if res == NULL:
            s = None
        else:
            s = res
            free(res)
        return s

    def increment(self, replica_uid):
        """Bump the generation counter for ``replica_uid``."""
        cdef int status

        status = u1db__vectorclock_increment(self._clock, replica_uid)
        if status != U1DB_OK:
            raise RuntimeError("Failed to increment: %d" % (status,))

    def maximize(self, vcr):
        """Merge ``vcr`` into this clock, taking the max of each counter."""
        cdef int status
        cdef VectorClockRev other

        other = vcr
        status = u1db__vectorclock_maximize(self._clock, other._clock)
        if status != U1DB_OK:
            raise RuntimeError("Failed to maximize: %d" % (status,))

    def is_newer(self, vcr):
        """Return True if this clock strictly supersedes ``vcr``."""
        cdef int is_newer
        cdef VectorClockRev other

        other = vcr
        is_newer = u1db__vectorclock_is_newer(self._clock, other._clock)
        if is_newer == 0:
            return False
        elif is_newer == 1:
            return True
        else:
            raise RuntimeError("Failed to is_newer: %d" % (is_newer,))
-
-
def sync_db_to_target(db, target):
    """Sync the data between a CDatabase and a CSyncTarget"""
    cdef CDatabase cdb
    cdef CSyncTarget ctarget
    cdef int local_gen = 0, status

    cdb = db
    ctarget = target
    # Release the GIL: the sync may block on network/database I/O.
    with nogil:
        status = u1db__sync_db_to_target(cdb._db, ctarget._st, &local_gen)
    handle_status("sync_db_to_target", status)
    return local_gen
-
-
def create_http_sync_target(url):
    """Return a CSyncTarget speaking plain HTTP to ``url``."""
    cdef CSyncTarget target

    target = CSyncTarget()
    handle_status("create_http_sync_target",
        u1db__create_http_sync_target(url, &target._st))
    return target
-
-
def create_oauth_http_sync_target(url, consumer_key, consumer_secret,
                                  token_key, token_secret):
    """Return a CSyncTarget speaking OAuth-authenticated HTTP to ``url``.

    :param url: The sync target URL.
    :param consumer_key: OAuth consumer key.
    :param consumer_secret: OAuth consumer secret.
    :param token_key: OAuth token key.
    :param token_secret: OAuth token secret.
    """
    cdef CSyncTarget target

    target = CSyncTarget()
    # Bug fix: the error context previously said "create_http_sync_target",
    # misattributing failures to the non-OAuth constructor.
    handle_status("create_oauth_http_sync_target",
        u1db__create_oauth_http_sync_target(url, consumer_key, consumer_secret,
            token_key, token_secret,
            &target._st))
    return target
-
-
def _format_sync_url(target, source_replica_uid):
    """Return the URL a target would use to sync with the given source.

    Exposed for the test suite; returns None if the C layer produced no URL.
    """
    cdef CSyncTarget st
    cdef char *sync_url = NULL
    cdef object res
    st = target
    handle_status("format_sync_url",
        u1db__format_sync_url(st._st, source_replica_uid, &sync_url))
    if sync_url == NULL:
        res = None
    else:
        # Copy into a Python string before freeing the C allocation.
        res = sync_url
        free(sync_url)
    return res
-
-
def _get_oauth_authorization(target, method, url):
    """Return the OAuth Authorization header value for a request, or None.

    Exposed for the test suite; ``target`` must be an OAuth-enabled
    CSyncTarget.
    """
    cdef CSyncTarget st
    cdef char *auth = NULL

    st = target
    handle_status("get_oauth_authorization",
        u1db__get_oauth_authorization(st._st, method, url, &auth))
    res = None
    if auth != NULL:
        # Copy into a Python string before freeing the C allocation.
        res = auth
        free(auth)
    return res
diff --git a/src/leap/soledad/u1db/tests/commandline/__init__.py b/src/leap/soledad/u1db/tests/commandline/__init__.py
deleted file mode 100644
index 007cecd3..00000000
--- a/src/leap/soledad/u1db/tests/commandline/__init__.py
+++ /dev/null
@@ -1,47 +0,0 @@
-# Copyright 2011 Canonical Ltd.
-#
-# This file is part of u1db.
-#
-# u1db is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License version 3
-# as published by the Free Software Foundation.
-#
-# u1db is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with u1db. If not, see <http://www.gnu.org/licenses/>.
-
-import errno
-import time
-
-
-def safe_close(process, timeout=0.1):
- """Shutdown the process in the nicest fashion you can manage.
-
- :param process: A subprocess.Popen object.
- :param timeout: We'll try to send 'SIGTERM' but if the process is alive
- longer that 'timeout', we'll send SIGKILL.
- """
- if process.poll() is not None:
- return
- try:
- process.terminate()
- except OSError, e:
- if e.errno in (errno.ESRCH,):
- # Process has exited
- return
- tend = time.time() + timeout
- while time.time() < tend:
- if process.poll() is not None:
- return
- time.sleep(0.01)
- try:
- process.kill()
- except OSError, e:
- if e.errno in (errno.ESRCH,):
- # Process has exited
- return
- process.wait()
diff --git a/src/leap/soledad/u1db/tests/commandline/test_client.py b/src/leap/soledad/u1db/tests/commandline/test_client.py
deleted file mode 100644
index 78ca21eb..00000000
--- a/src/leap/soledad/u1db/tests/commandline/test_client.py
+++ /dev/null
@@ -1,916 +0,0 @@
-# Copyright 2011 Canonical Ltd.
-#
-# This file is part of u1db.
-#
-# u1db is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License version 3
-# as published by the Free Software Foundation.
-#
-# u1db is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with u1db. If not, see <http://www.gnu.org/licenses/>.
-
-import cStringIO
-import os
-import sys
-try:
- import simplejson as json
-except ImportError:
- import json # noqa
-import subprocess
-
-from u1db import (
- errors,
- open as u1db_open,
- tests,
- vectorclock,
- )
-from u1db.commandline import (
- client,
- serve,
- )
-from u1db.tests.commandline import safe_close
-from u1db.tests import test_remote_sync_target
-
-
-class TestArgs(tests.TestCase):
- """These tests are meant to test just the argument parsing.
-
- Each Command should have at least one test, possibly more if it allows
- optional arguments, etc.
- """
-
- def setUp(self):
- super(TestArgs, self).setUp()
- self.parser = client.client_commands.make_argparser()
-
- def parse_args(self, args):
- # ArgumentParser.parse_args doesn't play very nicely with a test suite,
- # so we trap SystemExit in case something is wrong with the args we're
- # parsing.
- try:
- return self.parser.parse_args(args)
- except SystemExit:
- raise AssertionError('got SystemExit')
-
- def test_create(self):
- args = self.parse_args(['create', 'test.db'])
- self.assertEqual(client.CmdCreate, args.subcommand)
- self.assertEqual('test.db', args.database)
- self.assertEqual(None, args.doc_id)
- self.assertEqual(None, args.infile)
-
- def test_create_custom_doc_id(self):
- args = self.parse_args(['create', '--id', 'xyz', 'test.db'])
- self.assertEqual(client.CmdCreate, args.subcommand)
- self.assertEqual('test.db', args.database)
- self.assertEqual('xyz', args.doc_id)
- self.assertEqual(None, args.infile)
-
- def test_delete(self):
- args = self.parse_args(['delete', 'test.db', 'doc-id', 'doc-rev'])
- self.assertEqual(client.CmdDelete, args.subcommand)
- self.assertEqual('test.db', args.database)
- self.assertEqual('doc-id', args.doc_id)
- self.assertEqual('doc-rev', args.doc_rev)
-
- def test_get(self):
- args = self.parse_args(['get', 'test.db', 'doc-id'])
- self.assertEqual(client.CmdGet, args.subcommand)
- self.assertEqual('test.db', args.database)
- self.assertEqual('doc-id', args.doc_id)
- self.assertEqual(None, args.outfile)
-
- def test_get_dash(self):
- args = self.parse_args(['get', 'test.db', 'doc-id', '-'])
- self.assertEqual(client.CmdGet, args.subcommand)
- self.assertEqual('test.db', args.database)
- self.assertEqual('doc-id', args.doc_id)
- self.assertEqual(sys.stdout, args.outfile)
-
- def test_init_db(self):
- args = self.parse_args(
- ['init-db', 'test.db', '--replica-uid=replica-uid'])
- self.assertEqual(client.CmdInitDB, args.subcommand)
- self.assertEqual('test.db', args.database)
- self.assertEqual('replica-uid', args.replica_uid)
-
- def test_init_db_no_replica(self):
- args = self.parse_args(['init-db', 'test.db'])
- self.assertEqual(client.CmdInitDB, args.subcommand)
- self.assertEqual('test.db', args.database)
- self.assertIs(None, args.replica_uid)
-
- def test_put(self):
- args = self.parse_args(['put', 'test.db', 'doc-id', 'old-doc-rev'])
- self.assertEqual(client.CmdPut, args.subcommand)
- self.assertEqual('test.db', args.database)
- self.assertEqual('doc-id', args.doc_id)
- self.assertEqual('old-doc-rev', args.doc_rev)
- self.assertEqual(None, args.infile)
-
- def test_sync(self):
- args = self.parse_args(['sync', 'source', 'target'])
- self.assertEqual(client.CmdSync, args.subcommand)
- self.assertEqual('source', args.source)
- self.assertEqual('target', args.target)
-
- def test_create_index(self):
- args = self.parse_args(['create-index', 'db', 'index', 'expression'])
- self.assertEqual(client.CmdCreateIndex, args.subcommand)
- self.assertEqual('db', args.database)
- self.assertEqual('index', args.index)
- self.assertEqual(['expression'], args.expression)
-
- def test_create_index_multi_expression(self):
- args = self.parse_args(['create-index', 'db', 'index', 'e1', 'e2'])
- self.assertEqual(client.CmdCreateIndex, args.subcommand)
- self.assertEqual('db', args.database)
- self.assertEqual('index', args.index)
- self.assertEqual(['e1', 'e2'], args.expression)
-
- def test_list_indexes(self):
- args = self.parse_args(['list-indexes', 'db'])
- self.assertEqual(client.CmdListIndexes, args.subcommand)
- self.assertEqual('db', args.database)
-
- def test_delete_index(self):
- args = self.parse_args(['delete-index', 'db', 'index'])
- self.assertEqual(client.CmdDeleteIndex, args.subcommand)
- self.assertEqual('db', args.database)
- self.assertEqual('index', args.index)
-
- def test_get_index_keys(self):
- args = self.parse_args(['get-index-keys', 'db', 'index'])
- self.assertEqual(client.CmdGetIndexKeys, args.subcommand)
- self.assertEqual('db', args.database)
- self.assertEqual('index', args.index)
-
- def test_get_from_index(self):
- args = self.parse_args(['get-from-index', 'db', 'index', 'foo'])
- self.assertEqual(client.CmdGetFromIndex, args.subcommand)
- self.assertEqual('db', args.database)
- self.assertEqual('index', args.index)
- self.assertEqual(['foo'], args.values)
-
- def test_get_doc_conflicts(self):
- args = self.parse_args(['get-doc-conflicts', 'db', 'doc-id'])
- self.assertEqual(client.CmdGetDocConflicts, args.subcommand)
- self.assertEqual('db', args.database)
- self.assertEqual('doc-id', args.doc_id)
-
- def test_resolve(self):
- args = self.parse_args(
- ['resolve-doc', 'db', 'doc-id', 'rev:1', 'other:1'])
- self.assertEqual(client.CmdResolve, args.subcommand)
- self.assertEqual('db', args.database)
- self.assertEqual('doc-id', args.doc_id)
- self.assertEqual(['rev:1', 'other:1'], args.doc_revs)
- self.assertEqual(None, args.infile)
-
-
-class TestCaseWithDB(tests.TestCase):
- """These next tests are meant to have one class per Command.
-
- It is meant to test the inner workings of each command. The detailed
- testing should happen in these classes. Stuff like how it handles errors,
- etc. should be done here.
- """
-
- def setUp(self):
- super(TestCaseWithDB, self).setUp()
- self.working_dir = self.createTempDir()
- self.db_path = self.working_dir + '/test.db'
- self.db = u1db_open(self.db_path, create=True)
- self.db._set_replica_uid('test')
- self.addCleanup(self.db.close)
-
- def make_command(self, cls, stdin_content=''):
- inf = cStringIO.StringIO(stdin_content)
- out = cStringIO.StringIO()
- err = cStringIO.StringIO()
- return cls(inf, out, err)
-
-
-class TestCmdCreate(TestCaseWithDB):
-
- def test_create(self):
- cmd = self.make_command(client.CmdCreate)
- inf = cStringIO.StringIO(tests.simple_doc)
- cmd.run(self.db_path, inf, 'test-id')
- doc = self.db.get_doc('test-id')
- self.assertEqual(tests.simple_doc, doc.get_json())
- self.assertFalse(doc.has_conflicts)
- self.assertEqual('', cmd.stdout.getvalue())
- self.assertEqual('id: test-id\nrev: %s\n' % (doc.rev,),
- cmd.stderr.getvalue())
-
-
-class TestCmdDelete(TestCaseWithDB):
-
- def test_delete(self):
- doc = self.db.create_doc_from_json(tests.simple_doc)
- cmd = self.make_command(client.CmdDelete)
- cmd.run(self.db_path, doc.doc_id, doc.rev)
- doc2 = self.db.get_doc(doc.doc_id, include_deleted=True)
- self.assertEqual(doc.doc_id, doc2.doc_id)
- self.assertNotEqual(doc.rev, doc2.rev)
- self.assertIs(None, doc2.get_json())
- self.assertEqual('', cmd.stdout.getvalue())
- self.assertEqual('rev: %s\n' % (doc2.rev,), cmd.stderr.getvalue())
-
- def test_delete_fails_if_nonexistent(self):
- doc = self.db.create_doc_from_json(tests.simple_doc)
- db2_path = self.db_path + '.typo'
- cmd = self.make_command(client.CmdDelete)
- # TODO: We should really not be showing a traceback here. But we need
- # to teach the commandline infrastructure how to handle
- # exceptions.
- # However, we *do* want to test that the db doesn't get created
- # by accident.
- self.assertRaises(errors.DatabaseDoesNotExist,
- cmd.run, db2_path, doc.doc_id, doc.rev)
- self.assertFalse(os.path.exists(db2_path))
-
- def test_delete_no_such_doc(self):
- cmd = self.make_command(client.CmdDelete)
- # TODO: We should really not be showing a traceback here. But we need
- # to teach the commandline infrastructure how to handle
- # exceptions.
- self.assertRaises(errors.DocumentDoesNotExist,
- cmd.run, self.db_path, 'no-doc-id', 'no-rev')
-
- def test_delete_bad_rev(self):
- doc = self.db.create_doc_from_json(tests.simple_doc)
- cmd = self.make_command(client.CmdDelete)
- self.assertRaises(errors.RevisionConflict,
- cmd.run, self.db_path, doc.doc_id, 'not-the-actual-doc-rev:1')
- # TODO: Test that we get a pretty output.
-
-
-class TestCmdGet(TestCaseWithDB):
-
- def setUp(self):
- super(TestCmdGet, self).setUp()
- self.doc = self.db.create_doc_from_json(
- tests.simple_doc, doc_id='my-test-doc')
-
- def test_get_simple(self):
- cmd = self.make_command(client.CmdGet)
- cmd.run(self.db_path, 'my-test-doc', None)
- self.assertEqual(tests.simple_doc + "\n", cmd.stdout.getvalue())
- self.assertEqual('rev: %s\n' % (self.doc.rev,),
- cmd.stderr.getvalue())
-
- def test_get_conflict(self):
- doc = self.make_document('my-test-doc', 'other:1', '{}', False)
- self.db._put_doc_if_newer(
- doc, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- cmd = self.make_command(client.CmdGet)
- cmd.run(self.db_path, 'my-test-doc', None)
- self.assertEqual('{}\n', cmd.stdout.getvalue())
- self.assertEqual('rev: %s\nDocument has conflicts.\n' % (doc.rev,),
- cmd.stderr.getvalue())
-
- def test_get_fail(self):
- cmd = self.make_command(client.CmdGet)
- result = cmd.run(self.db_path, 'doc-not-there', None)
- self.assertEqual(1, result)
- self.assertEqual("", cmd.stdout.getvalue())
- self.assertTrue("not found" in cmd.stderr.getvalue())
-
- def test_get_no_database(self):
- cmd = self.make_command(client.CmdGet)
- retval = cmd.run(self.db_path + "__DOES_NOT_EXIST", "my-doc", None)
- self.assertEqual(retval, 1)
- self.assertEqual(cmd.stdout.getvalue(), '')
- self.assertEqual(cmd.stderr.getvalue(), 'Database does not exist.\n')
-
-
-class TestCmdGetDocConflicts(TestCaseWithDB):
-
- def setUp(self):
- super(TestCmdGetDocConflicts, self).setUp()
- self.doc1 = self.db.create_doc_from_json(
- tests.simple_doc, doc_id='my-doc')
- self.doc2 = self.make_document('my-doc', 'other:1', '{}', False)
- self.db._put_doc_if_newer(
- self.doc2, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
-
- def test_get_doc_conflicts_none(self):
- self.db.create_doc_from_json(tests.simple_doc, doc_id='a-doc')
- cmd = self.make_command(client.CmdGetDocConflicts)
- cmd.run(self.db_path, 'a-doc')
- self.assertEqual([], json.loads(cmd.stdout.getvalue()))
- self.assertEqual('', cmd.stderr.getvalue())
-
- def test_get_doc_conflicts_simple(self):
- cmd = self.make_command(client.CmdGetDocConflicts)
- cmd.run(self.db_path, 'my-doc')
- self.assertEqual(
- [dict(rev=self.doc2.rev, content=self.doc2.content),
- dict(rev=self.doc1.rev, content=self.doc1.content)],
- json.loads(cmd.stdout.getvalue()))
- self.assertEqual('', cmd.stderr.getvalue())
-
- def test_get_doc_conflicts_no_db(self):
- cmd = self.make_command(client.CmdGetDocConflicts)
- retval = cmd.run(self.db_path + "__DOES_NOT_EXIST", "my-doc")
- self.assertEqual(retval, 1)
- self.assertEqual(cmd.stdout.getvalue(), '')
- self.assertEqual(cmd.stderr.getvalue(), 'Database does not exist.\n')
-
- def test_get_doc_conflicts_no_doc(self):
- cmd = self.make_command(client.CmdGetDocConflicts)
- retval = cmd.run(self.db_path, "some-doc")
- self.assertEqual(retval, 1)
- self.assertEqual(cmd.stdout.getvalue(), '')
- self.assertEqual(cmd.stderr.getvalue(), 'Document does not exist.\n')
-
-
-class TestCmdInit(TestCaseWithDB):
-
- def test_init_new(self):
- path = self.working_dir + '/test2.db'
- self.assertFalse(os.path.exists(path))
- cmd = self.make_command(client.CmdInitDB)
- cmd.run(path, 'test-uid')
- self.assertTrue(os.path.exists(path))
- db = u1db_open(path, create=False)
- self.assertEqual('test-uid', db._replica_uid)
-
- def test_init_no_uid(self):
- path = self.working_dir + '/test2.db'
- cmd = self.make_command(client.CmdInitDB)
- cmd.run(path, None)
- self.assertTrue(os.path.exists(path))
- db = u1db_open(path, create=False)
- self.assertIsNot(None, db._replica_uid)
-
-
-class TestCmdPut(TestCaseWithDB):
-
- def setUp(self):
- super(TestCmdPut, self).setUp()
- self.doc = self.db.create_doc_from_json(
- tests.simple_doc, doc_id='my-test-doc')
-
- def test_put_simple(self):
- cmd = self.make_command(client.CmdPut)
- inf = cStringIO.StringIO(tests.nested_doc)
- cmd.run(self.db_path, 'my-test-doc', self.doc.rev, inf)
- doc = self.db.get_doc('my-test-doc')
- self.assertNotEqual(self.doc.rev, doc.rev)
- self.assertGetDoc(self.db, 'my-test-doc', doc.rev,
- tests.nested_doc, False)
- self.assertEqual('', cmd.stdout.getvalue())
- self.assertEqual('rev: %s\n' % (doc.rev,),
- cmd.stderr.getvalue())
-
- def test_put_no_db(self):
- cmd = self.make_command(client.CmdPut)
- inf = cStringIO.StringIO(tests.nested_doc)
- retval = cmd.run(self.db_path + "__DOES_NOT_EXIST",
- 'my-test-doc', self.doc.rev, inf)
- self.assertEqual(retval, 1)
- self.assertEqual('', cmd.stdout.getvalue())
- self.assertEqual('Database does not exist.\n', cmd.stderr.getvalue())
-
- def test_put_no_doc(self):
- cmd = self.make_command(client.CmdPut)
- inf = cStringIO.StringIO(tests.nested_doc)
- retval = cmd.run(self.db_path, 'no-such-doc', 'wut:1', inf)
- self.assertEqual(1, retval)
- self.assertEqual('', cmd.stdout.getvalue())
- self.assertEqual('Document does not exist.\n', cmd.stderr.getvalue())
-
- def test_put_doc_old_rev(self):
- rev = self.doc.rev
- doc = self.make_document('my-test-doc', rev, '{}', False)
- self.db.put_doc(doc)
- cmd = self.make_command(client.CmdPut)
- inf = cStringIO.StringIO(tests.nested_doc)
- retval = cmd.run(self.db_path, 'my-test-doc', rev, inf)
- self.assertEqual(1, retval)
- self.assertEqual('', cmd.stdout.getvalue())
- self.assertEqual('Given revision is not current.\n',
- cmd.stderr.getvalue())
-
- def test_put_doc_w_conflicts(self):
- doc = self.make_document('my-test-doc', 'other:1', '{}', False)
- self.db._put_doc_if_newer(
- doc, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- cmd = self.make_command(client.CmdPut)
- inf = cStringIO.StringIO(tests.nested_doc)
- retval = cmd.run(self.db_path, 'my-test-doc', 'other:1', inf)
- self.assertEqual(1, retval)
- self.assertEqual('', cmd.stdout.getvalue())
- self.assertEqual('Document has conflicts.\n'
- 'Inspect with get-doc-conflicts, then resolve.\n',
- cmd.stderr.getvalue())
-
-
-class TestCmdResolve(TestCaseWithDB):
-
- def setUp(self):
- super(TestCmdResolve, self).setUp()
- self.doc1 = self.db.create_doc_from_json(
- tests.simple_doc, doc_id='my-doc')
- self.doc2 = self.make_document('my-doc', 'other:1', '{}', False)
- self.db._put_doc_if_newer(
- self.doc2, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
-
- def test_resolve_simple(self):
- self.assertTrue(self.db.get_doc('my-doc').has_conflicts)
- cmd = self.make_command(client.CmdResolve)
- inf = cStringIO.StringIO(tests.nested_doc)
- cmd.run(self.db_path, 'my-doc', [self.doc1.rev, self.doc2.rev], inf)
- doc = self.db.get_doc('my-doc')
- vec = vectorclock.VectorClockRev(doc.rev)
- self.assertTrue(
- vec.is_newer(vectorclock.VectorClockRev(self.doc1.rev)))
- self.assertTrue(
- vec.is_newer(vectorclock.VectorClockRev(self.doc2.rev)))
- self.assertGetDoc(self.db, 'my-doc', doc.rev, tests.nested_doc, False)
- self.assertEqual('', cmd.stdout.getvalue())
- self.assertEqual('rev: %s\n' % (doc.rev,),
- cmd.stderr.getvalue())
-
- def test_resolve_double(self):
- moar = '{"x": 42}'
- doc3 = self.make_document('my-doc', 'third:1', moar, False)
- self.db._put_doc_if_newer(
- doc3, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- cmd = self.make_command(client.CmdResolve)
- inf = cStringIO.StringIO(tests.nested_doc)
- cmd.run(self.db_path, 'my-doc', [self.doc1.rev, self.doc2.rev], inf)
- doc = self.db.get_doc('my-doc')
- self.assertGetDoc(self.db, 'my-doc', doc.rev, moar, True)
- self.assertEqual('', cmd.stdout.getvalue())
- self.assertEqual(
- 'rev: %s\nDocument still has conflicts.\n' % (doc.rev,),
- cmd.stderr.getvalue())
-
- def test_resolve_no_db(self):
- cmd = self.make_command(client.CmdResolve)
- retval = cmd.run(self.db_path + "__DOES_NOT_EXIST", "my-doc", [], None)
- self.assertEqual(retval, 1)
- self.assertEqual(cmd.stdout.getvalue(), '')
- self.assertEqual(cmd.stderr.getvalue(), 'Database does not exist.\n')
-
- def test_resolve_no_doc(self):
- cmd = self.make_command(client.CmdResolve)
- retval = cmd.run(self.db_path, "foo", [], None)
- self.assertEqual(retval, 1)
- self.assertEqual(cmd.stdout.getvalue(), '')
- self.assertEqual(cmd.stderr.getvalue(), 'Document does not exist.\n')
-
-
-class TestCmdSync(TestCaseWithDB):
-
- def setUp(self):
- super(TestCmdSync, self).setUp()
- self.db2_path = self.working_dir + '/test2.db'
- self.db2 = u1db_open(self.db2_path, create=True)
- self.addCleanup(self.db2.close)
- self.db2._set_replica_uid('test2')
- self.doc = self.db.create_doc_from_json(
- tests.simple_doc, doc_id='test-id')
- self.doc2 = self.db2.create_doc_from_json(
- tests.nested_doc, doc_id='my-test-id')
-
- def test_sync(self):
- cmd = self.make_command(client.CmdSync)
- cmd.run(self.db_path, self.db2_path)
- self.assertGetDoc(self.db2, 'test-id', self.doc.rev, tests.simple_doc,
- False)
- self.assertGetDoc(self.db, 'my-test-id', self.doc2.rev,
- tests.nested_doc, False)
-
-
-class TestCmdSyncRemote(tests.TestCaseWithServer, TestCaseWithDB):
-
- make_app_with_state = \
- staticmethod(test_remote_sync_target.make_http_app)
-
- def setUp(self):
- super(TestCmdSyncRemote, self).setUp()
- self.startServer()
- self.db2 = self.request_state._create_database('test2.db')
-
- def test_sync_remote(self):
- doc1 = self.db.create_doc_from_json(tests.simple_doc)
- doc2 = self.db2.create_doc_from_json(tests.nested_doc)
- db2_url = self.getURL('test2.db')
- self.assertTrue(db2_url.startswith('http://'))
- self.assertTrue(db2_url.endswith('/test2.db'))
- cmd = self.make_command(client.CmdSync)
- cmd.run(self.db_path, db2_url)
- self.assertGetDoc(self.db2, doc1.doc_id, doc1.rev, tests.simple_doc,
- False)
- self.assertGetDoc(self.db, doc2.doc_id, doc2.rev, tests.nested_doc,
- False)
-
-
-class TestCmdCreateIndex(TestCaseWithDB):
-
- def test_create_index(self):
- cmd = self.make_command(client.CmdCreateIndex)
- retval = cmd.run(self.db_path, "foo", ["bar", "baz"])
- self.assertEqual(self.db.list_indexes(), [('foo', ['bar', "baz"])])
- self.assertEqual(retval, None) # conveniently mapped to 0
- self.assertEqual(cmd.stdout.getvalue(), '')
- self.assertEqual(cmd.stderr.getvalue(), '')
-
- def test_create_index_no_db(self):
- cmd = self.make_command(client.CmdCreateIndex)
- retval = cmd.run(self.db_path + "__DOES_NOT_EXIST", "foo", ["bar"])
- self.assertEqual(retval, 1)
- self.assertEqual(cmd.stdout.getvalue(), '')
- self.assertEqual(cmd.stderr.getvalue(), 'Database does not exist.\n')
-
- def test_create_dupe_index(self):
- self.db.create_index("foo", "bar")
- cmd = self.make_command(client.CmdCreateIndex)
- retval = cmd.run(self.db_path, "foo", ["bar"])
- self.assertEqual(retval, None)
- self.assertEqual(cmd.stdout.getvalue(), '')
- self.assertEqual(cmd.stderr.getvalue(), '')
-
- def test_create_dupe_index_different_expression(self):
- self.db.create_index("foo", "bar")
- cmd = self.make_command(client.CmdCreateIndex)
- retval = cmd.run(self.db_path, "foo", ["baz"])
- self.assertEqual(retval, 1)
- self.assertEqual(cmd.stdout.getvalue(), '')
- self.assertEqual(cmd.stderr.getvalue(),
- "There is already a different index named 'foo'.\n")
-
- def test_create_index_bad_expression(self):
- cmd = self.make_command(client.CmdCreateIndex)
- retval = cmd.run(self.db_path, "foo", ["WAT()"])
- self.assertEqual(retval, 1)
- self.assertEqual(cmd.stdout.getvalue(), '')
- self.assertEqual(cmd.stderr.getvalue(),
- 'Bad index expression.\n')
-
-
-class TestCmdListIndexes(TestCaseWithDB):
-
- def test_list_no_indexes(self):
- cmd = self.make_command(client.CmdListIndexes)
- retval = cmd.run(self.db_path)
- self.assertEqual(retval, None)
- self.assertEqual(cmd.stdout.getvalue(), '')
- self.assertEqual(cmd.stderr.getvalue(), '')
-
- def test_list_indexes(self):
- self.db.create_index("foo", "bar", "baz")
- cmd = self.make_command(client.CmdListIndexes)
- retval = cmd.run(self.db_path)
- self.assertEqual(retval, None)
- self.assertEqual(cmd.stdout.getvalue(), 'foo: bar, baz\n')
- self.assertEqual(cmd.stderr.getvalue(), '')
-
- def test_list_several_indexes(self):
- self.db.create_index("foo", "bar", "baz")
- self.db.create_index("bar", "baz", "foo")
- self.db.create_index("baz", "foo", "bar")
- cmd = self.make_command(client.CmdListIndexes)
- retval = cmd.run(self.db_path)
- self.assertEqual(retval, None)
- self.assertEqual(cmd.stdout.getvalue(),
- 'bar: baz, foo\n'
- 'baz: foo, bar\n'
- 'foo: bar, baz\n'
- )
- self.assertEqual(cmd.stderr.getvalue(), '')
-
- def test_list_indexes_no_db(self):
- cmd = self.make_command(client.CmdListIndexes)
- retval = cmd.run(self.db_path + "__DOES_NOT_EXIST")
- self.assertEqual(retval, 1)
- self.assertEqual(cmd.stdout.getvalue(), '')
- self.assertEqual(cmd.stderr.getvalue(), 'Database does not exist.\n')
-
-
-class TestCmdDeleteIndex(TestCaseWithDB):
-
- def test_delete_index(self):
- self.db.create_index("foo", "bar", "baz")
- cmd = self.make_command(client.CmdDeleteIndex)
- retval = cmd.run(self.db_path, "foo")
- self.assertEqual(retval, None)
- self.assertEqual(cmd.stdout.getvalue(), '')
- self.assertEqual(cmd.stderr.getvalue(), '')
- self.assertEqual([], self.db.list_indexes())
-
- def test_delete_index_no_db(self):
- cmd = self.make_command(client.CmdDeleteIndex)
- retval = cmd.run(self.db_path + "__DOES_NOT_EXIST", "foo")
- self.assertEqual(retval, 1)
- self.assertEqual(cmd.stdout.getvalue(), '')
- self.assertEqual(cmd.stderr.getvalue(), 'Database does not exist.\n')
-
- def test_delete_index_no_index(self):
- cmd = self.make_command(client.CmdDeleteIndex)
- retval = cmd.run(self.db_path, "foo")
- self.assertEqual(retval, None)
- self.assertEqual(cmd.stdout.getvalue(), '')
- self.assertEqual(cmd.stderr.getvalue(), '')
-
-
-class TestCmdGetIndexKeys(TestCaseWithDB):
-
- def test_get_index_keys(self):
- self.db.create_index("foo", "bar")
- self.db.create_doc_from_json('{"bar": 42}')
- cmd = self.make_command(client.CmdGetIndexKeys)
- retval = cmd.run(self.db_path, "foo")
- self.assertEqual(retval, None)
- self.assertEqual(cmd.stdout.getvalue(), '42\n')
- self.assertEqual(cmd.stderr.getvalue(), '')
-
- def test_get_index_keys_nonascii(self):
- self.db.create_index("foo", "bar")
- self.db.create_doc_from_json('{"bar": "\u00a4"}')
- cmd = self.make_command(client.CmdGetIndexKeys)
- retval = cmd.run(self.db_path, "foo")
- self.assertEqual(retval, None)
- self.assertEqual(cmd.stdout.getvalue(), '\xc2\xa4\n')
- self.assertEqual(cmd.stderr.getvalue(), '')
-
- def test_get_index_keys_empty(self):
- self.db.create_index("foo", "bar")
- cmd = self.make_command(client.CmdGetIndexKeys)
- retval = cmd.run(self.db_path, "foo")
- self.assertEqual(retval, None)
- self.assertEqual(cmd.stdout.getvalue(), '')
- self.assertEqual(cmd.stderr.getvalue(), '')
-
- def test_get_index_keys_no_db(self):
- cmd = self.make_command(client.CmdGetIndexKeys)
- retval = cmd.run(self.db_path + "__DOES_NOT_EXIST", "foo")
- self.assertEqual(retval, 1)
- self.assertEqual(cmd.stdout.getvalue(), '')
- self.assertEqual(cmd.stderr.getvalue(), 'Database does not exist.\n')
-
- def test_get_index_keys_no_index(self):
- cmd = self.make_command(client.CmdGetIndexKeys)
- retval = cmd.run(self.db_path, "foo")
- self.assertEqual(retval, 1)
- self.assertEqual(cmd.stdout.getvalue(), '')
- self.assertEqual(cmd.stderr.getvalue(), 'Index does not exist.\n')
-
-
-class TestCmdGetFromIndex(TestCaseWithDB):
-
- def test_get_from_index(self):
- self.db.create_index("index", "key")
- doc1 = self.db.create_doc_from_json(tests.simple_doc)
- doc2 = self.db.create_doc_from_json(tests.nested_doc)
- cmd = self.make_command(client.CmdGetFromIndex)
- retval = cmd.run(self.db_path, "index", ["value"])
- self.assertEqual(retval, None)
- self.assertEqual(sorted(json.loads(cmd.stdout.getvalue())),
- sorted([dict(id=doc1.doc_id,
- rev=doc1.rev,
- content=doc1.content),
- dict(id=doc2.doc_id,
- rev=doc2.rev,
- content=doc2.content),
- ]))
- self.assertEqual(cmd.stderr.getvalue(), '')
-
- def test_get_from_index_empty(self):
- self.db.create_index("index", "key")
- cmd = self.make_command(client.CmdGetFromIndex)
- retval = cmd.run(self.db_path, "index", ["value"])
- self.assertEqual(retval, None)
- self.assertEqual(cmd.stdout.getvalue(), '[]\n')
- self.assertEqual(cmd.stderr.getvalue(), '')
-
- def test_get_from_index_no_db(self):
- cmd = self.make_command(client.CmdGetFromIndex)
- retval = cmd.run(self.db_path + "__DOES_NOT_EXIST", "foo", [])
- self.assertEqual(retval, 1)
- self.assertEqual(cmd.stdout.getvalue(), '')
- self.assertEqual(cmd.stderr.getvalue(), 'Database does not exist.\n')
-
- def test_get_from_index_no_index(self):
- cmd = self.make_command(client.CmdGetFromIndex)
- retval = cmd.run(self.db_path, "foo", [])
- self.assertEqual(retval, 1)
- self.assertEqual(cmd.stdout.getvalue(), '')
- self.assertEqual(cmd.stderr.getvalue(), 'Index does not exist.\n')
-
- def test_get_from_index_two_expr_instead_of_one(self):
- self.db.create_index("index", "key1")
- cmd = self.make_command(client.CmdGetFromIndex)
- cmd.argv = ["XX", "YY"]
- retval = cmd.run(self.db_path, "index", ["value1", "value2"])
- self.assertEqual(retval, 1)
- self.assertEqual(cmd.stdout.getvalue(), '')
- self.assertEqual("Invalid query: index 'index' requires"
- " 1 query expression, not 2.\n"
- "For example, the following would be valid:\n"
- " XX YY %r 'index' 'value1'\n"
- % self.db_path, cmd.stderr.getvalue())
-
- def test_get_from_index_three_expr_instead_of_two(self):
- self.db.create_index("index", "key1", "key2")
- cmd = self.make_command(client.CmdGetFromIndex)
- cmd.argv = ["XX", "YY"]
- retval = cmd.run(self.db_path, "index", ["value1", "value2", "value3"])
- self.assertEqual(retval, 1)
- self.assertEqual(cmd.stdout.getvalue(), '')
- self.assertEqual("Invalid query: index 'index' requires"
- " 2 query expressions, not 3.\n"
- "For example, the following would be valid:\n"
- " XX YY %r 'index' 'value1' 'value2'\n"
- % self.db_path, cmd.stderr.getvalue())
-
- def test_get_from_index_one_expr_instead_of_two(self):
- self.db.create_index("index", "key1", "key2")
- cmd = self.make_command(client.CmdGetFromIndex)
- cmd.argv = ["XX", "YY"]
- retval = cmd.run(self.db_path, "index", ["value1"])
- self.assertEqual(retval, 1)
- self.assertEqual(cmd.stdout.getvalue(), '')
- self.assertEqual("Invalid query: index 'index' requires"
- " 2 query expressions, not 1.\n"
- "For example, the following would be valid:\n"
- " XX YY %r 'index' 'value1' '*'\n"
- % self.db_path, cmd.stderr.getvalue())
-
- def test_get_from_index_cant_bad_glob(self):
- self.db.create_index("index", "key1", "key2")
- cmd = self.make_command(client.CmdGetFromIndex)
- cmd.argv = ["XX", "YY"]
- retval = cmd.run(self.db_path, "index", ["value1*", "value2"])
- self.assertEqual(retval, 1)
- self.assertEqual(cmd.stdout.getvalue(), '')
- self.assertEqual("Invalid query:"
- " a star can only be followed by stars.\n"
- "For example, the following would be valid:\n"
- " XX YY %r 'index' 'value1*' '*'\n"
- % self.db_path, cmd.stderr.getvalue())
-
-
-class RunMainHelper(object):
-
- def run_main(self, args, stdin=None):
- if stdin is not None:
- self.patch(sys, 'stdin', cStringIO.StringIO(stdin))
- stdout = cStringIO.StringIO()
- stderr = cStringIO.StringIO()
- self.patch(sys, 'stdout', stdout)
- self.patch(sys, 'stderr', stderr)
- try:
- ret = client.main(args)
- except SystemExit, e:
- self.fail("Intercepted SystemExit: %s" % (e,))
- if ret is None:
- ret = 0
- return ret, stdout.getvalue(), stderr.getvalue()
-
-
-class TestCommandLine(TestCaseWithDB, RunMainHelper):
- """These are meant to test that the infrastructure is fully connected.
-
- Each command is likely to only have one test here. Something that ensures
- 'main()' knows about and can run the command correctly. Most logic-level
- testing of the Command should go into its own test class above.
- """
-
- def _get_u1db_client_path(self):
- from u1db import __path__ as u1db_path
- u1db_parent_dir = os.path.dirname(u1db_path[0])
- return os.path.join(u1db_parent_dir, 'u1db-client')
-
- def runU1DBClient(self, args):
- command = [sys.executable, self._get_u1db_client_path()]
- command.extend(args)
- p = subprocess.Popen(command, stdin=subprocess.PIPE,
- stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- self.addCleanup(safe_close, p)
- return p
-
- def test_create_subprocess(self):
- p = self.runU1DBClient(['create', '--id', 'test-id', self.db_path])
- stdout, stderr = p.communicate(tests.simple_doc)
- self.assertEqual(0, p.returncode)
- self.assertEqual('', stdout)
- doc = self.db.get_doc('test-id')
- self.assertEqual(tests.simple_doc, doc.get_json())
- self.assertFalse(doc.has_conflicts)
- expected = 'id: test-id\nrev: %s\n' % (doc.rev,)
- stripped = stderr.replace('\r\n', '\n')
- if expected != stripped:
- # When run under python-dbg, it prints out the refs after the
- # actual content, so match it if we need to.
- expected_re = expected + '\[\d+ refs\]\n'
- self.assertRegexpMatches(stripped, expected_re)
-
- def test_get(self):
- doc = self.db.create_doc_from_json(tests.simple_doc, doc_id='test-id')
- ret, stdout, stderr = self.run_main(['get', self.db_path, 'test-id'])
- self.assertEqual(0, ret)
- self.assertEqual(tests.simple_doc + "\n", stdout)
- self.assertEqual('rev: %s\n' % (doc.rev,), stderr)
- ret, stdout, stderr = self.run_main(['get', self.db_path, 'not-there'])
- self.assertEqual(1, ret)
-
- def test_delete(self):
- doc = self.db.create_doc_from_json(tests.simple_doc, doc_id='test-id')
- ret, stdout, stderr = self.run_main(
- ['delete', self.db_path, 'test-id', doc.rev])
- doc = self.db.get_doc('test-id', include_deleted=True)
- self.assertEqual(0, ret)
- self.assertEqual('', stdout)
- self.assertEqual('rev: %s\n' % (doc.rev,), stderr)
-
- def test_init_db(self):
- path = self.working_dir + '/test2.db'
- ret, stdout, stderr = self.run_main(['init-db', path])
- u1db_open(path, create=False)
-
- def test_put(self):
- doc = self.db.create_doc_from_json(tests.simple_doc, doc_id='test-id')
- ret, stdout, stderr = self.run_main(
- ['put', self.db_path, 'test-id', doc.rev],
- stdin=tests.nested_doc)
- doc = self.db.get_doc('test-id')
- self.assertFalse(doc.has_conflicts)
- self.assertEqual(tests.nested_doc, doc.get_json())
- self.assertEqual(0, ret)
- self.assertEqual('', stdout)
- self.assertEqual('rev: %s\n' % (doc.rev,), stderr)
-
- def test_sync(self):
- doc = self.db.create_doc_from_json(tests.simple_doc, doc_id='test-id')
- self.db2_path = self.working_dir + '/test2.db'
- self.db2 = u1db_open(self.db2_path, create=True)
- self.addCleanup(self.db2.close)
- ret, stdout, stderr = self.run_main(
- ['sync', self.db_path, self.db2_path])
- self.assertEqual(0, ret)
- self.assertEqual('', stdout)
- self.assertEqual('', stderr)
- self.assertGetDoc(
- self.db2, 'test-id', doc.rev, tests.simple_doc, False)
-
-
-class TestHTTPIntegration(tests.TestCaseWithServer, RunMainHelper):
- """Meant to test the cases where commands operate over http."""
-
- def server_def(self):
- def make_server(host_port, _application):
- return serve.make_server(host_port[0], host_port[1],
- self.working_dir)
- return make_server, "shutdown", "http"
-
- def setUp(self):
- super(TestHTTPIntegration, self).setUp()
- self.working_dir = self.createTempDir(prefix='u1db-http-server-')
- self.startServer()
-
- def getPath(self, dbname):
- return os.path.join(self.working_dir, dbname)
-
- def test_init_db(self):
- url = self.getURL('new.db')
- ret, stdout, stderr = self.run_main(['init-db', url])
- u1db_open(self.getPath('new.db'), create=False)
-
- def test_create_get_put_delete(self):
- db = u1db_open(self.getPath('test.db'), create=True)
- url = self.getURL('test.db')
- doc_id = '%abcd'
- ret, stdout, stderr = self.run_main(['create', url, '--id', doc_id],
- stdin=tests.simple_doc)
- self.assertEqual(0, ret)
- ret, stdout, stderr = self.run_main(['get', url, doc_id])
- self.assertEqual(0, ret)
- self.assertTrue(stderr.startswith('rev: '))
- doc_rev = stderr[len('rev: '):].rstrip()
- ret, stdout, stderr = self.run_main(['put', url, doc_id, doc_rev],
- stdin=tests.nested_doc)
- self.assertEqual(0, ret)
- self.assertTrue(stderr.startswith('rev: '))
- doc_rev1 = stderr[len('rev: '):].rstrip()
- self.assertGetDoc(db, doc_id, doc_rev1, tests.nested_doc, False)
- ret, stdout, stderr = self.run_main(['delete', url, doc_id, doc_rev1])
- self.assertEqual(0, ret)
- self.assertTrue(stderr.startswith('rev: '))
- doc_rev2 = stderr[len('rev: '):].rstrip()
- self.assertGetDocIncludeDeleted(db, doc_id, doc_rev2, None, False)
diff --git a/src/leap/soledad/u1db/tests/commandline/test_command.py b/src/leap/soledad/u1db/tests/commandline/test_command.py
deleted file mode 100644
index 43580f23..00000000
--- a/src/leap/soledad/u1db/tests/commandline/test_command.py
+++ /dev/null
@@ -1,105 +0,0 @@
-# Copyright 2011 Canonical Ltd.
-#
-# This file is part of u1db.
-#
-# u1db is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License version 3
-# as published by the Free Software Foundation.
-#
-# u1db is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with u1db. If not, see <http://www.gnu.org/licenses/>.
-
-import cStringIO
-import argparse
-
-from u1db import (
- tests,
- )
-from u1db.commandline import (
- command,
- )
-
-
-class MyTestCommand(command.Command):
- """Help String"""
-
- name = 'mycmd'
-
- @classmethod
- def _populate_subparser(cls, parser):
- parser.add_argument('foo')
- parser.add_argument('--bar', dest='nbar', type=int)
-
- def run(self, foo, nbar):
- self.stdout.write('foo: %s nbar: %d' % (foo, nbar))
- return 0
-
-
-def make_stdin_out_err():
- return cStringIO.StringIO(), cStringIO.StringIO(), cStringIO.StringIO()
-
-
-class TestCommandGroup(tests.TestCase):
-
- def trap_system_exit(self, func, *args, **kwargs):
- try:
- return func(*args, **kwargs)
- except SystemExit, e:
- self.fail('Got SystemExit trying to run: %s' % (func,))
-
- def parse_args(self, parser, args):
- return self.trap_system_exit(parser.parse_args, args)
-
- def test_register(self):
- group = command.CommandGroup()
- self.assertEqual({}, group.commands)
- group.register(MyTestCommand)
- self.assertEqual({'mycmd': MyTestCommand},
- group.commands)
-
- def test_make_argparser(self):
- group = command.CommandGroup(description='test-foo')
- parser = group.make_argparser()
- self.assertIsInstance(parser, argparse.ArgumentParser)
-
- def test_make_argparser_with_command(self):
- group = command.CommandGroup(description='test-foo')
- group.register(MyTestCommand)
- parser = group.make_argparser()
- args = self.parse_args(parser, ['mycmd', 'foozizle', '--bar=10'])
- self.assertEqual('foozizle', args.foo)
- self.assertEqual(10, args.nbar)
- self.assertEqual(MyTestCommand, args.subcommand)
-
- def test_run_argv(self):
- group = command.CommandGroup()
- group.register(MyTestCommand)
- stdin, stdout, stderr = make_stdin_out_err()
- ret = self.trap_system_exit(group.run_argv,
- ['mycmd', 'foozizle', '--bar=10'],
- stdin, stdout, stderr)
- self.assertEqual(0, ret)
-
-
-class TestCommand(tests.TestCase):
-
- def make_command(self):
- stdin, stdout, stderr = make_stdin_out_err()
- return command.Command(stdin, stdout, stderr)
-
- def test__init__(self):
- cmd = self.make_command()
- self.assertIsNot(None, cmd.stdin)
- self.assertIsNot(None, cmd.stdout)
- self.assertIsNot(None, cmd.stderr)
-
- def test_run_args(self):
- stdin, stdout, stderr = make_stdin_out_err()
- cmd = MyTestCommand(stdin, stdout, stderr)
- res = cmd.run(foo='foozizle', nbar=10)
- self.assertEqual('foo: foozizle nbar: 10', stdout.getvalue())
diff --git a/src/leap/soledad/u1db/tests/commandline/test_serve.py b/src/leap/soledad/u1db/tests/commandline/test_serve.py
deleted file mode 100644
index 6397eabe..00000000
--- a/src/leap/soledad/u1db/tests/commandline/test_serve.py
+++ /dev/null
@@ -1,101 +0,0 @@
-# Copyright 2011 Canonical Ltd.
-#
-# This file is part of u1db.
-#
-# u1db is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License version 3
-# as published by the Free Software Foundation.
-#
-# u1db is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with u1db. If not, see <http://www.gnu.org/licenses/>.
-
-import os
-import socket
-import subprocess
-import sys
-
-from u1db import (
- __version__ as _u1db_version,
- open as u1db_open,
- tests,
- )
-from u1db.remote import http_client
-from u1db.tests.commandline import safe_close
-
-
-class TestU1DBServe(tests.TestCase):
-
- def _get_u1db_serve_path(self):
- from u1db import __path__ as u1db_path
- u1db_parent_dir = os.path.dirname(u1db_path[0])
- return os.path.join(u1db_parent_dir, 'u1db-serve')
-
- def startU1DBServe(self, args):
- command = [sys.executable, self._get_u1db_serve_path()]
- command.extend(args)
- p = subprocess.Popen(command, stdin=subprocess.PIPE,
- stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- self.addCleanup(safe_close, p)
- return p
-
- def test_help(self):
- p = self.startU1DBServe(['--help'])
- stdout, stderr = p.communicate()
- if stderr != '':
- # stderr should normally be empty, but if we are running under
- # python-dbg, it contains the following string
- self.assertRegexpMatches(stderr, r'\[\d+ refs\]')
- self.assertEqual(0, p.returncode)
- self.assertIn('Run the U1DB server', stdout)
-
- def test_bind_to_port(self):
- p = self.startU1DBServe([])
- starts = 'listening on:'
- x = p.stdout.readline()
- self.assertTrue(x.startswith(starts))
- port = int(x[len(starts):].split(":")[1])
- url = "http://127.0.0.1:%s/" % port
- c = http_client.HTTPClientBase(url)
- self.addCleanup(c.close)
- res, _ = c._request_json('GET', [])
- self.assertEqual({'version': _u1db_version}, res)
-
- def test_supply_port(self):
- s = socket.socket()
- s.bind(('127.0.0.1', 0))
- host, port = s.getsockname()
- s.close()
- p = self.startU1DBServe(['--port', str(port)])
- x = p.stdout.readline().strip()
- self.assertEqual('listening on: 127.0.0.1:%s' % (port,), x)
- url = "http://127.0.0.1:%s/" % port
- c = http_client.HTTPClientBase(url)
- self.addCleanup(c.close)
- res, _ = c._request_json('GET', [])
- self.assertEqual({'version': _u1db_version}, res)
-
- def test_bind_to_host(self):
- p = self.startU1DBServe(["--host", "localhost"])
- starts = 'listening on: 127.0.0.1:'
- x = p.stdout.readline()
- self.assertTrue(x.startswith(starts))
-
- def test_supply_working_dir(self):
- tmp_dir = self.createTempDir('u1db-serve-test')
- db = u1db_open(os.path.join(tmp_dir, 'landmark.db'), create=True)
- db.close()
- p = self.startU1DBServe(['--working-dir', tmp_dir])
- starts = 'listening on:'
- x = p.stdout.readline()
- self.assertTrue(x.startswith(starts))
- port = int(x[len(starts):].split(":")[1])
- url = "http://127.0.0.1:%s/landmark.db" % port
- c = http_client.HTTPClientBase(url)
- self.addCleanup(c.close)
- res, _ = c._request_json('GET', [])
- self.assertEqual({}, res)
diff --git a/src/leap/soledad/u1db/tests/test_auth_middleware.py b/src/leap/soledad/u1db/tests/test_auth_middleware.py
deleted file mode 100644
index e765f8a7..00000000
--- a/src/leap/soledad/u1db/tests/test_auth_middleware.py
+++ /dev/null
@@ -1,309 +0,0 @@
-# Copyright 2012 Canonical Ltd.
-#
-# This file is part of u1db.
-#
-# u1db is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License version 3
-# as published by the Free Software Foundation.
-#
-# u1db is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with u1db. If not, see <http://www.gnu.org/licenses/>.
-
-"""Test OAuth wsgi middleware"""
-import paste.fixture
-from oauth import oauth
-try:
- import simplejson as json
-except ImportError:
- import json # noqa
-import time
-
-from u1db import tests
-
-from u1db.remote.oauth_middleware import OAuthMiddleware
-from u1db.remote.basic_auth_middleware import BasicAuthMiddleware, Unauthorized
-
-
-BASE_URL = 'https://example.net'
-
-
-class TestBasicAuthMiddleware(tests.TestCase):
-
- def setUp(self):
- super(TestBasicAuthMiddleware, self).setUp()
- self.got = []
-
- def witness_app(environ, start_response):
- start_response("200 OK", [("content-type", "text/plain")])
- self.got.append((
- environ['user_id'], environ['PATH_INFO'],
- environ['QUERY_STRING']))
- return ["ok"]
-
- class MyAuthMiddleware(BasicAuthMiddleware):
-
- def verify_user(self, environ, user, password):
- if user != "correct_user":
- raise Unauthorized
- if password != "correct_password":
- raise Unauthorized
- environ['user_id'] = user
-
- self.auth_midw = MyAuthMiddleware(witness_app, prefix="/pfx/")
- self.app = paste.fixture.TestApp(self.auth_midw)
-
- def test_expect_prefix(self):
- url = BASE_URL + '/foo/doc/doc-id'
- resp = self.app.delete(url, expect_errors=True)
- self.assertEqual(400, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual('{"error": "bad request"}', resp.body)
-
- def test_missing_auth(self):
- url = BASE_URL + '/pfx/foo/doc/doc-id'
- resp = self.app.delete(url, expect_errors=True)
- self.assertEqual(401, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual(
- {"error": "unauthorized",
- "message": "Missing Basic Authentication."},
- json.loads(resp.body))
-
- def test_correct_auth(self):
- user = "correct_user"
- password = "correct_password"
- params = {'old_rev': 'old-rev'}
- url = BASE_URL + '/pfx/foo/doc/doc-id?%s' % (
- '&'.join("%s=%s" % (k, v) for k, v in params.items()))
- auth = '%s:%s' % (user, password)
- headers = {
- 'Authorization': 'Basic %s' % (auth.encode('base64'),)}
- resp = self.app.delete(url, headers=headers)
- self.assertEqual(200, resp.status)
- self.assertEqual(
- [('correct_user', '/foo/doc/doc-id', 'old_rev=old-rev')], self.got)
-
- def test_incorrect_auth(self):
- user = "correct_user"
- password = "incorrect_password"
- params = {'old_rev': 'old-rev'}
- url = BASE_URL + '/pfx/foo/doc/doc-id?%s' % (
- '&'.join("%s=%s" % (k, v) for k, v in params.items()))
- auth = '%s:%s' % (user, password)
- headers = {
- 'Authorization': 'Basic %s' % (auth.encode('base64'),)}
- resp = self.app.delete(url, headers=headers, expect_errors=True)
- self.assertEqual(401, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual(
- {"error": "unauthorized",
- "message": "Incorrect password or login."},
- json.loads(resp.body))
-
-
-class TestOAuthMiddlewareDefaultPrefix(tests.TestCase):
- def setUp(self):
-
- super(TestOAuthMiddlewareDefaultPrefix, self).setUp()
- self.got = []
-
- def witness_app(environ, start_response):
- start_response("200 OK", [("content-type", "text/plain")])
- self.got.append((environ['token_key'], environ['PATH_INFO'],
- environ['QUERY_STRING']))
- return ["ok"]
-
- class MyOAuthMiddleware(OAuthMiddleware):
- get_oauth_data_store = lambda self: tests.testingOAuthStore
-
- def verify(self, environ, oauth_req):
- consumer, token = super(MyOAuthMiddleware, self).verify(
- environ, oauth_req)
- environ['token_key'] = token.key
-
- self.oauth_midw = MyOAuthMiddleware(witness_app, BASE_URL)
- self.app = paste.fixture.TestApp(self.oauth_midw)
-
- def test_expect_tilde(self):
- url = BASE_URL + '/foo/doc/doc-id'
- resp = self.app.delete(url, expect_errors=True)
- self.assertEqual(400, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual('{"error": "bad request"}', resp.body)
-
- def test_oauth_in_header(self):
- url = BASE_URL + '/~/foo/doc/doc-id'
- params = {'old_rev': 'old-rev'}
- oauth_req = oauth.OAuthRequest.from_consumer_and_token(
- tests.consumer2,
- tests.token2,
- parameters=params,
- http_url=url,
- http_method='DELETE'
- )
- url = oauth_req.get_normalized_http_url() + '?' + (
- '&'.join("%s=%s" % (k, v) for k, v in params.items()))
- oauth_req.sign_request(tests.sign_meth_HMAC_SHA1,
- tests.consumer2, tests.token2)
- resp = self.app.delete(url, headers=oauth_req.to_header())
- self.assertEqual(200, resp.status)
- self.assertEqual([(tests.token2.key,
- '/foo/doc/doc-id', 'old_rev=old-rev')], self.got)
-
- def test_oauth_in_query_string(self):
- url = BASE_URL + '/~/foo/doc/doc-id'
- params = {'old_rev': 'old-rev'}
- oauth_req = oauth.OAuthRequest.from_consumer_and_token(
- tests.consumer1,
- tests.token1,
- parameters=params,
- http_url=url,
- http_method='DELETE'
- )
- oauth_req.sign_request(tests.sign_meth_HMAC_SHA1,
- tests.consumer1, tests.token1)
- resp = self.app.delete(oauth_req.to_url())
- self.assertEqual(200, resp.status)
- self.assertEqual([(tests.token1.key,
- '/foo/doc/doc-id', 'old_rev=old-rev')], self.got)
-
-
-class TestOAuthMiddleware(tests.TestCase):
-
- def setUp(self):
- super(TestOAuthMiddleware, self).setUp()
- self.got = []
-
- def witness_app(environ, start_response):
- start_response("200 OK", [("content-type", "text/plain")])
- self.got.append((environ['token_key'], environ['PATH_INFO'],
- environ['QUERY_STRING']))
- return ["ok"]
-
- class MyOAuthMiddleware(OAuthMiddleware):
- get_oauth_data_store = lambda self: tests.testingOAuthStore
-
- def verify(self, environ, oauth_req):
- consumer, token = super(MyOAuthMiddleware, self).verify(
- environ, oauth_req)
- environ['token_key'] = token.key
-
- self.oauth_midw = MyOAuthMiddleware(
- witness_app, BASE_URL, prefix='/pfx/')
- self.app = paste.fixture.TestApp(self.oauth_midw)
-
- def test_expect_prefix(self):
- url = BASE_URL + '/foo/doc/doc-id'
- resp = self.app.delete(url, expect_errors=True)
- self.assertEqual(400, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual('{"error": "bad request"}', resp.body)
-
- def test_missing_oauth(self):
- url = BASE_URL + '/pfx/foo/doc/doc-id'
- resp = self.app.delete(url, expect_errors=True)
- self.assertEqual(401, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual(
- {"error": "unauthorized", "message": "Missing OAuth."},
- json.loads(resp.body))
-
- def test_oauth_in_query_string(self):
- url = BASE_URL + '/pfx/foo/doc/doc-id'
- params = {'old_rev': 'old-rev'}
- oauth_req = oauth.OAuthRequest.from_consumer_and_token(
- tests.consumer1,
- tests.token1,
- parameters=params,
- http_url=url,
- http_method='DELETE'
- )
- oauth_req.sign_request(tests.sign_meth_HMAC_SHA1,
- tests.consumer1, tests.token1)
- resp = self.app.delete(oauth_req.to_url())
- self.assertEqual(200, resp.status)
- self.assertEqual([(tests.token1.key,
- '/foo/doc/doc-id', 'old_rev=old-rev')], self.got)
-
- def test_oauth_invalid(self):
- url = BASE_URL + '/pfx/foo/doc/doc-id'
- params = {'old_rev': 'old-rev'}
- oauth_req = oauth.OAuthRequest.from_consumer_and_token(
- tests.consumer1,
- tests.token3,
- parameters=params,
- http_url=url,
- http_method='DELETE'
- )
- oauth_req.sign_request(tests.sign_meth_HMAC_SHA1,
- tests.consumer1, tests.token3)
- resp = self.app.delete(oauth_req.to_url(),
- expect_errors=True)
- self.assertEqual(401, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- err = json.loads(resp.body)
- self.assertEqual({"error": "unauthorized",
- "message": err['message']},
- err)
-
- def test_oauth_in_header(self):
- url = BASE_URL + '/pfx/foo/doc/doc-id'
- params = {'old_rev': 'old-rev'}
- oauth_req = oauth.OAuthRequest.from_consumer_and_token(
- tests.consumer2,
- tests.token2,
- parameters=params,
- http_url=url,
- http_method='DELETE'
- )
- url = oauth_req.get_normalized_http_url() + '?' + (
- '&'.join("%s=%s" % (k, v) for k, v in params.items()))
- oauth_req.sign_request(tests.sign_meth_HMAC_SHA1,
- tests.consumer2, tests.token2)
- resp = self.app.delete(url, headers=oauth_req.to_header())
- self.assertEqual(200, resp.status)
- self.assertEqual([(tests.token2.key,
- '/foo/doc/doc-id', 'old_rev=old-rev')], self.got)
-
- def test_oauth_plain_text(self):
- url = BASE_URL + '/pfx/foo/doc/doc-id'
- params = {'old_rev': 'old-rev'}
- oauth_req = oauth.OAuthRequest.from_consumer_and_token(
- tests.consumer1,
- tests.token1,
- parameters=params,
- http_url=url,
- http_method='DELETE'
- )
- oauth_req.sign_request(tests.sign_meth_PLAINTEXT,
- tests.consumer1, tests.token1)
- resp = self.app.delete(oauth_req.to_url())
- self.assertEqual(200, resp.status)
- self.assertEqual([(tests.token1.key,
- '/foo/doc/doc-id', 'old_rev=old-rev')], self.got)
-
- def test_oauth_timestamp_threshold(self):
- url = BASE_URL + '/pfx/foo/doc/doc-id'
- params = {'old_rev': 'old-rev'}
- oauth_req = oauth.OAuthRequest.from_consumer_and_token(
- tests.consumer1,
- tests.token1,
- parameters=params,
- http_url=url,
- http_method='DELETE'
- )
- oauth_req.set_parameter('oauth_timestamp', int(time.time()) - 5)
- oauth_req.sign_request(tests.sign_meth_PLAINTEXT,
- tests.consumer1, tests.token1)
- # tweak threshold
- self.oauth_midw.timestamp_threshold = 1
- resp = self.app.delete(oauth_req.to_url(), expect_errors=True)
- self.assertEqual(401, resp.status)
- err = json.loads(resp.body)
- self.assertIn('Expired timestamp', err['message'])
- self.assertIn('threshold 1', err['message'])
diff --git a/src/leap/soledad/u1db/tests/test_backends.py b/src/leap/soledad/u1db/tests/test_backends.py
deleted file mode 100644
index 7a3c9e5c..00000000
--- a/src/leap/soledad/u1db/tests/test_backends.py
+++ /dev/null
@@ -1,1895 +0,0 @@
-# Copyright 2011 Canonical Ltd.
-#
-# This file is part of u1db.
-#
-# u1db is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License version 3
-# as published by the Free Software Foundation.
-#
-# u1db is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with u1db. If not, see <http://www.gnu.org/licenses/>.
-
-"""The backend class for U1DB. This deals with hiding storage details."""
-
-try:
- import simplejson as json
-except ImportError:
- import json # noqa
-from u1db import (
- DocumentBase,
- errors,
- tests,
- vectorclock,
- )
-
-simple_doc = tests.simple_doc
-nested_doc = tests.nested_doc
-
-from u1db.tests.test_remote_sync_target import (
- make_http_app,
- make_oauth_http_app,
-)
-
-from u1db.remote import (
- http_database,
- )
-
-try:
- from u1db.tests import c_backend_wrapper
-except ImportError:
- c_backend_wrapper = None # noqa
-
-
-def make_http_database_for_test(test, replica_uid, path='test'):
- test.startServer()
- test.request_state._create_database(replica_uid)
- return http_database.HTTPDatabase(test.getURL(path))
-
-
-def copy_http_database_for_test(test, db):
- # DO NOT COPY OR REUSE THIS CODE OUTSIDE TESTS: COPYING U1DB DATABASES IS
- # THE WRONG THING TO DO, THE ONLY REASON WE DO SO HERE IS TO TEST THAT WE
- # CORRECTLY DETECT IT HAPPENING SO THAT WE CAN RAISE ERRORS RATHER THAN
- # CORRUPT USER DATA. USE SYNC INSTEAD, OR WE WILL SEND NINJA TO YOUR
- # HOUSE.
- return test.request_state._copy_database(db)
-
-
-def make_oauth_http_database_for_test(test, replica_uid):
- http_db = make_http_database_for_test(test, replica_uid, '~/test')
- http_db.set_oauth_credentials(tests.consumer1.key, tests.consumer1.secret,
- tests.token1.key, tests.token1.secret)
- return http_db
-
-
-def copy_oauth_http_database_for_test(test, db):
- # DO NOT COPY OR REUSE THIS CODE OUTSIDE TESTS: COPYING U1DB DATABASES IS
- # THE WRONG THING TO DO, THE ONLY REASON WE DO SO HERE IS TO TEST THAT WE
- # CORRECTLY DETECT IT HAPPENING SO THAT WE CAN RAISE ERRORS RATHER THAN
- # CORRUPT USER DATA. USE SYNC INSTEAD, OR WE WILL SEND NINJA TO YOUR
- # HOUSE.
- http_db = test.request_state._copy_database(db)
- http_db.set_oauth_credentials(tests.consumer1.key, tests.consumer1.secret,
- tests.token1.key, tests.token1.secret)
- return http_db
-
-
-class TestAlternativeDocument(DocumentBase):
- """A (not very) alternative implementation of Document."""
-
-
-class AllDatabaseTests(tests.DatabaseBaseTests, tests.TestCaseWithServer):
-
- scenarios = tests.LOCAL_DATABASES_SCENARIOS + [
- ('http', {'make_database_for_test': make_http_database_for_test,
- 'copy_database_for_test': copy_http_database_for_test,
- 'make_document_for_test': tests.make_document_for_test,
- 'make_app_with_state': make_http_app}),
- ('oauth_http', {'make_database_for_test':
- make_oauth_http_database_for_test,
- 'copy_database_for_test':
- copy_oauth_http_database_for_test,
- 'make_document_for_test': tests.make_document_for_test,
- 'make_app_with_state': make_oauth_http_app})
- ] + tests.C_DATABASE_SCENARIOS
-
- def test_close(self):
- self.db.close()
-
- def test_create_doc_allocating_doc_id(self):
- doc = self.db.create_doc_from_json(simple_doc)
- self.assertNotEqual(None, doc.doc_id)
- self.assertNotEqual(None, doc.rev)
- self.assertGetDoc(self.db, doc.doc_id, doc.rev, simple_doc, False)
-
- def test_create_doc_different_ids_same_db(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- doc2 = self.db.create_doc_from_json(nested_doc)
- self.assertNotEqual(doc1.doc_id, doc2.doc_id)
-
- def test_create_doc_with_id(self):
- doc = self.db.create_doc_from_json(simple_doc, doc_id='my-id')
- self.assertEqual('my-id', doc.doc_id)
- self.assertNotEqual(None, doc.rev)
- self.assertGetDoc(self.db, doc.doc_id, doc.rev, simple_doc, False)
-
- def test_create_doc_existing_id(self):
- doc = self.db.create_doc_from_json(simple_doc)
- new_content = '{"something": "else"}'
- self.assertRaises(
- errors.RevisionConflict, self.db.create_doc_from_json,
- new_content, doc.doc_id)
- self.assertGetDoc(self.db, doc.doc_id, doc.rev, simple_doc, False)
-
- def test_put_doc_creating_initial(self):
- doc = self.make_document('my_doc_id', None, simple_doc)
- new_rev = self.db.put_doc(doc)
- self.assertIsNot(None, new_rev)
- self.assertGetDoc(self.db, 'my_doc_id', new_rev, simple_doc, False)
-
- def test_put_doc_space_in_id(self):
- doc = self.make_document('my doc id', None, simple_doc)
- self.assertRaises(errors.InvalidDocId, self.db.put_doc, doc)
-
- def test_put_doc_update(self):
- doc = self.db.create_doc_from_json(simple_doc, doc_id='my_doc_id')
- orig_rev = doc.rev
- doc.set_json('{"updated": "stuff"}')
- new_rev = self.db.put_doc(doc)
- self.assertNotEqual(new_rev, orig_rev)
- self.assertGetDoc(self.db, 'my_doc_id', new_rev,
- '{"updated": "stuff"}', False)
- self.assertEqual(doc.rev, new_rev)
-
- def test_put_non_ascii_key(self):
- content = json.dumps({u'key\xe5': u'val'})
- doc = self.db.create_doc_from_json(content, doc_id='my_doc')
- self.assertGetDoc(self.db, 'my_doc', doc.rev, content, False)
-
- def test_put_non_ascii_value(self):
- content = json.dumps({'key': u'\xe5'})
- doc = self.db.create_doc_from_json(content, doc_id='my_doc')
- self.assertGetDoc(self.db, 'my_doc', doc.rev, content, False)
-
- def test_put_doc_refuses_no_id(self):
- doc = self.make_document(None, None, simple_doc)
- self.assertRaises(errors.InvalidDocId, self.db.put_doc, doc)
- doc = self.make_document("", None, simple_doc)
- self.assertRaises(errors.InvalidDocId, self.db.put_doc, doc)
-
- def test_put_doc_refuses_slashes(self):
- doc = self.make_document('a/b', None, simple_doc)
- self.assertRaises(errors.InvalidDocId, self.db.put_doc, doc)
- doc = self.make_document(r'\b', None, simple_doc)
- self.assertRaises(errors.InvalidDocId, self.db.put_doc, doc)
-
- def test_put_doc_url_quoting_is_fine(self):
- doc_id = "%2F%2Ffoo%2Fbar"
- doc = self.make_document(doc_id, None, simple_doc)
- new_rev = self.db.put_doc(doc)
- self.assertGetDoc(self.db, doc_id, new_rev, simple_doc, False)
-
- def test_put_doc_refuses_non_existing_old_rev(self):
- doc = self.make_document('doc-id', 'test:4', simple_doc)
- self.assertRaises(errors.RevisionConflict, self.db.put_doc, doc)
-
- def test_put_doc_refuses_non_ascii_doc_id(self):
- doc = self.make_document('d\xc3\xa5c-id', None, simple_doc)
- self.assertRaises(errors.InvalidDocId, self.db.put_doc, doc)
-
- def test_put_fails_with_bad_old_rev(self):
- doc = self.db.create_doc_from_json(simple_doc, doc_id='my_doc_id')
- old_rev = doc.rev
- bad_doc = self.make_document(doc.doc_id, 'other:1',
- '{"something": "else"}')
- self.assertRaises(errors.RevisionConflict, self.db.put_doc, bad_doc)
- self.assertGetDoc(self.db, 'my_doc_id', old_rev, simple_doc, False)
-
- def test_create_succeeds_after_delete(self):
- doc = self.db.create_doc_from_json(simple_doc, doc_id='my_doc_id')
- self.db.delete_doc(doc)
- deleted_doc = self.db.get_doc('my_doc_id', include_deleted=True)
- deleted_vc = vectorclock.VectorClockRev(deleted_doc.rev)
- new_doc = self.db.create_doc_from_json(simple_doc, doc_id='my_doc_id')
- self.assertGetDoc(self.db, 'my_doc_id', new_doc.rev, simple_doc, False)
- new_vc = vectorclock.VectorClockRev(new_doc.rev)
- self.assertTrue(
- new_vc.is_newer(deleted_vc),
- "%s does not supersede %s" % (new_doc.rev, deleted_doc.rev))
-
- def test_put_succeeds_after_delete(self):
- doc = self.db.create_doc_from_json(simple_doc, doc_id='my_doc_id')
- self.db.delete_doc(doc)
- deleted_doc = self.db.get_doc('my_doc_id', include_deleted=True)
- deleted_vc = vectorclock.VectorClockRev(deleted_doc.rev)
- doc2 = self.make_document('my_doc_id', None, simple_doc)
- self.db.put_doc(doc2)
- self.assertGetDoc(self.db, 'my_doc_id', doc2.rev, simple_doc, False)
- new_vc = vectorclock.VectorClockRev(doc2.rev)
- self.assertTrue(
- new_vc.is_newer(deleted_vc),
- "%s does not supersede %s" % (doc2.rev, deleted_doc.rev))
-
- def test_get_doc_after_put(self):
- doc = self.db.create_doc_from_json(simple_doc, doc_id='my_doc_id')
- self.assertGetDoc(self.db, 'my_doc_id', doc.rev, simple_doc, False)
-
- def test_get_doc_nonexisting(self):
- self.assertIs(None, self.db.get_doc('non-existing'))
-
- def test_get_doc_deleted(self):
- doc = self.db.create_doc_from_json(simple_doc, doc_id='my_doc_id')
- self.db.delete_doc(doc)
- self.assertIs(None, self.db.get_doc('my_doc_id'))
-
- def test_get_doc_include_deleted(self):
- doc = self.db.create_doc_from_json(simple_doc, doc_id='my_doc_id')
- self.db.delete_doc(doc)
- self.assertGetDocIncludeDeleted(
- self.db, doc.doc_id, doc.rev, None, False)
-
- def test_get_docs(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- doc2 = self.db.create_doc_from_json(nested_doc)
- self.assertEqual([doc1, doc2],
- list(self.db.get_docs([doc1.doc_id, doc2.doc_id])))
-
- def test_get_docs_deleted(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- doc2 = self.db.create_doc_from_json(nested_doc)
- self.db.delete_doc(doc1)
- self.assertEqual([doc2],
- list(self.db.get_docs([doc1.doc_id, doc2.doc_id])))
-
- def test_get_docs_include_deleted(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- doc2 = self.db.create_doc_from_json(nested_doc)
- self.db.delete_doc(doc1)
- self.assertEqual(
- [doc1, doc2],
- list(self.db.get_docs([doc1.doc_id, doc2.doc_id],
- include_deleted=True)))
-
- def test_get_docs_request_ordered(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- doc2 = self.db.create_doc_from_json(nested_doc)
- self.assertEqual([doc1, doc2],
- list(self.db.get_docs([doc1.doc_id, doc2.doc_id])))
- self.assertEqual([doc2, doc1],
- list(self.db.get_docs([doc2.doc_id, doc1.doc_id])))
-
- def test_get_docs_empty_list(self):
- self.assertEqual([], list(self.db.get_docs([])))
-
- def test_handles_nested_content(self):
- doc = self.db.create_doc_from_json(nested_doc)
- self.assertGetDoc(self.db, doc.doc_id, doc.rev, nested_doc, False)
-
- def test_handles_doc_with_null(self):
- doc = self.db.create_doc_from_json('{"key": null}')
- self.assertGetDoc(self.db, doc.doc_id, doc.rev, '{"key": null}', False)
-
- def test_delete_doc(self):
- doc = self.db.create_doc_from_json(simple_doc)
- self.assertGetDoc(self.db, doc.doc_id, doc.rev, simple_doc, False)
- orig_rev = doc.rev
- self.db.delete_doc(doc)
- self.assertNotEqual(orig_rev, doc.rev)
- self.assertGetDocIncludeDeleted(
- self.db, doc.doc_id, doc.rev, None, False)
- self.assertIs(None, self.db.get_doc(doc.doc_id))
-
- def test_delete_doc_non_existent(self):
- doc = self.make_document('non-existing', 'other:1', simple_doc)
- self.assertRaises(errors.DocumentDoesNotExist, self.db.delete_doc, doc)
-
- def test_delete_doc_already_deleted(self):
- doc = self.db.create_doc_from_json(simple_doc)
- self.db.delete_doc(doc)
- self.assertRaises(errors.DocumentAlreadyDeleted,
- self.db.delete_doc, doc)
- self.assertGetDocIncludeDeleted(
- self.db, doc.doc_id, doc.rev, None, False)
-
- def test_delete_doc_bad_rev(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- self.assertGetDoc(self.db, doc1.doc_id, doc1.rev, simple_doc, False)
- doc2 = self.make_document(doc1.doc_id, 'other:1', simple_doc)
- self.assertRaises(errors.RevisionConflict, self.db.delete_doc, doc2)
- self.assertGetDoc(self.db, doc1.doc_id, doc1.rev, simple_doc, False)
-
- def test_delete_doc_sets_content_to_None(self):
- doc = self.db.create_doc_from_json(simple_doc)
- self.db.delete_doc(doc)
- self.assertIs(None, doc.get_json())
-
- def test_delete_doc_rev_supersedes(self):
- doc = self.db.create_doc_from_json(simple_doc)
- doc.set_json(nested_doc)
- self.db.put_doc(doc)
- doc.set_json('{"fishy": "content"}')
- self.db.put_doc(doc)
- old_rev = doc.rev
- self.db.delete_doc(doc)
- cur_vc = vectorclock.VectorClockRev(old_rev)
- deleted_vc = vectorclock.VectorClockRev(doc.rev)
- self.assertTrue(deleted_vc.is_newer(cur_vc),
- "%s does not supersede %s" % (doc.rev, old_rev))
-
- def test_delete_then_put(self):
- doc = self.db.create_doc_from_json(simple_doc)
- self.db.delete_doc(doc)
- self.assertGetDocIncludeDeleted(
- self.db, doc.doc_id, doc.rev, None, False)
- doc.set_json(nested_doc)
- self.db.put_doc(doc)
- self.assertGetDoc(self.db, doc.doc_id, doc.rev, nested_doc, False)
-
-
-class DocumentSizeTests(tests.DatabaseBaseTests):
-
- scenarios = tests.LOCAL_DATABASES_SCENARIOS + tests.C_DATABASE_SCENARIOS
-
- def test_put_doc_refuses_oversized_documents(self):
- self.db.set_document_size_limit(1)
- doc = self.make_document('doc-id', None, simple_doc)
- self.assertRaises(errors.DocumentTooBig, self.db.put_doc, doc)
-
- def test_create_doc_refuses_oversized_documents(self):
- self.db.set_document_size_limit(1)
- self.assertRaises(
- errors.DocumentTooBig, self.db.create_doc_from_json, simple_doc,
- doc_id='my_doc_id')
-
- def test_set_document_size_limit_zero(self):
- self.db.set_document_size_limit(0)
- self.assertEqual(0, self.db.document_size_limit)
-
- def test_set_document_size_limit(self):
- self.db.set_document_size_limit(1000000)
- self.assertEqual(1000000, self.db.document_size_limit)
-
-
-class LocalDatabaseTests(tests.DatabaseBaseTests):
-
- scenarios = tests.LOCAL_DATABASES_SCENARIOS + tests.C_DATABASE_SCENARIOS
-
- def test_create_doc_different_ids_diff_db(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- db2 = self.create_database('other-uid')
- doc2 = db2.create_doc_from_json(simple_doc)
- self.assertNotEqual(doc1.doc_id, doc2.doc_id)
-
- def test_put_doc_refuses_slashes_picky(self):
- doc = self.make_document('/a', None, simple_doc)
- self.assertRaises(errors.InvalidDocId, self.db.put_doc, doc)
-
- def test_get_all_docs_empty(self):
- self.assertEqual([], list(self.db.get_all_docs()[1]))
-
- def test_get_all_docs(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- doc2 = self.db.create_doc_from_json(nested_doc)
- self.assertEqual(
- sorted([doc1, doc2]), sorted(list(self.db.get_all_docs()[1])))
-
- def test_get_all_docs_exclude_deleted(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- doc2 = self.db.create_doc_from_json(nested_doc)
- self.db.delete_doc(doc2)
- self.assertEqual([doc1], list(self.db.get_all_docs()[1]))
-
- def test_get_all_docs_include_deleted(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- doc2 = self.db.create_doc_from_json(nested_doc)
- self.db.delete_doc(doc2)
- self.assertEqual(
- sorted([doc1, doc2]),
- sorted(list(self.db.get_all_docs(include_deleted=True)[1])))
-
- def test_get_all_docs_generation(self):
- self.db.create_doc_from_json(simple_doc)
- self.db.create_doc_from_json(nested_doc)
- self.assertEqual(2, self.db.get_all_docs()[0])
-
- def test_simple_put_doc_if_newer(self):
- doc = self.make_document('my-doc-id', 'test:1', simple_doc)
- state_at_gen = self.db._put_doc_if_newer(
- doc, save_conflict=False, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertEqual(('inserted', 1), state_at_gen)
- self.assertGetDoc(self.db, 'my-doc-id', 'test:1', simple_doc, False)
-
- def test_simple_put_doc_if_newer_deleted(self):
- self.db.create_doc_from_json('{}', doc_id='my-doc-id')
- doc = self.make_document('my-doc-id', 'test:2', None)
- state_at_gen = self.db._put_doc_if_newer(
- doc, save_conflict=False, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertEqual(('inserted', 2), state_at_gen)
- self.assertGetDocIncludeDeleted(
- self.db, 'my-doc-id', 'test:2', None, False)
-
- def test_put_doc_if_newer_already_superseded(self):
- orig_doc = '{"new": "doc"}'
- doc1 = self.db.create_doc_from_json(orig_doc)
- doc1_rev1 = doc1.rev
- doc1.set_json(simple_doc)
- self.db.put_doc(doc1)
- doc1_rev2 = doc1.rev
- # Nothing is inserted, because the document is already superseded
- doc = self.make_document(doc1.doc_id, doc1_rev1, orig_doc)
- state, _ = self.db._put_doc_if_newer(
- doc, save_conflict=False, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertEqual('superseded', state)
- self.assertGetDoc(self.db, doc1.doc_id, doc1_rev2, simple_doc, False)
-
- def test_put_doc_if_newer_autoresolve(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- rev = doc1.rev
- doc = self.make_document(doc1.doc_id, "whatever:1", doc1.get_json())
- state, _ = self.db._put_doc_if_newer(
- doc, save_conflict=False, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertEqual('superseded', state)
- doc2 = self.db.get_doc(doc1.doc_id)
- v2 = vectorclock.VectorClockRev(doc2.rev)
- self.assertTrue(v2.is_newer(vectorclock.VectorClockRev("whatever:1")))
- self.assertTrue(v2.is_newer(vectorclock.VectorClockRev(rev)))
- # strictly newer locally
- self.assertTrue(rev not in doc2.rev)
-
- def test_put_doc_if_newer_already_converged(self):
- orig_doc = '{"new": "doc"}'
- doc1 = self.db.create_doc_from_json(orig_doc)
- state_at_gen = self.db._put_doc_if_newer(
- doc1, save_conflict=False, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertEqual(('converged', 1), state_at_gen)
-
- def test_put_doc_if_newer_conflicted(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- # Nothing is inserted, the document id is returned as would-conflict
- alt_doc = self.make_document(doc1.doc_id, 'alternate:1', nested_doc)
- state, _ = self.db._put_doc_if_newer(
- alt_doc, save_conflict=False, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertEqual('conflicted', state)
- # The database wasn't altered
- self.assertGetDoc(self.db, doc1.doc_id, doc1.rev, simple_doc, False)
-
- def test_put_doc_if_newer_newer_generation(self):
- self.db._set_replica_gen_and_trans_id('other', 1, 'T-sid')
- doc = self.make_document('doc_id', 'other:2', simple_doc)
- state, _ = self.db._put_doc_if_newer(
- doc, save_conflict=False, replica_uid='other', replica_gen=2,
- replica_trans_id='T-irrelevant')
- self.assertEqual('inserted', state)
-
- def test_put_doc_if_newer_same_generation_same_txid(self):
- self.db._set_replica_gen_and_trans_id('other', 1, 'T-sid')
- doc = self.db.create_doc_from_json(simple_doc)
- self.make_document(doc.doc_id, 'other:1', simple_doc)
- state, _ = self.db._put_doc_if_newer(
- doc, save_conflict=False, replica_uid='other', replica_gen=1,
- replica_trans_id='T-sid')
- self.assertEqual('converged', state)
-
- def test_put_doc_if_newer_wrong_transaction_id(self):
- self.db._set_replica_gen_and_trans_id('other', 1, 'T-sid')
- doc = self.make_document('doc_id', 'other:1', simple_doc)
- self.assertRaises(
- errors.InvalidTransactionId,
- self.db._put_doc_if_newer, doc, save_conflict=False,
- replica_uid='other', replica_gen=1, replica_trans_id='T-sad')
-
- def test_put_doc_if_newer_old_generation_older_doc(self):
- orig_doc = '{"new": "doc"}'
- doc = self.db.create_doc_from_json(orig_doc)
- doc_rev1 = doc.rev
- doc.set_json(simple_doc)
- self.db.put_doc(doc)
- self.db._set_replica_gen_and_trans_id('other', 3, 'T-sid')
- older_doc = self.make_document(doc.doc_id, doc_rev1, simple_doc)
- state, _ = self.db._put_doc_if_newer(
- older_doc, save_conflict=False, replica_uid='other', replica_gen=8,
- replica_trans_id='T-irrelevant')
- self.assertEqual('superseded', state)
-
- def test_put_doc_if_newer_old_generation_newer_doc(self):
- self.db._set_replica_gen_and_trans_id('other', 5, 'T-sid')
- doc = self.make_document('doc_id', 'other:1', simple_doc)
- self.assertRaises(
- errors.InvalidGeneration,
- self.db._put_doc_if_newer, doc, save_conflict=False,
- replica_uid='other', replica_gen=1, replica_trans_id='T-sad')
-
- def test_put_doc_if_newer_replica_uid(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- self.db._set_replica_gen_and_trans_id('other', 1, 'T-sid')
- doc2 = self.make_document(doc1.doc_id, doc1.rev + '|other:1',
- nested_doc)
- self.assertEqual('inserted',
- self.db._put_doc_if_newer(doc2, save_conflict=False,
- replica_uid='other', replica_gen=2,
- replica_trans_id='T-id2')[0])
- self.assertEqual((2, 'T-id2'), self.db._get_replica_gen_and_trans_id(
- 'other'))
- # Compare to the old rev, should be superseded
- doc2 = self.make_document(doc1.doc_id, doc1.rev, nested_doc)
- self.assertEqual('superseded',
- self.db._put_doc_if_newer(doc2, save_conflict=False,
- replica_uid='other', replica_gen=3,
- replica_trans_id='T-id3')[0])
- self.assertEqual(
- (3, 'T-id3'), self.db._get_replica_gen_and_trans_id('other'))
- # A conflict that isn't saved still records the sync gen, because we
- # don't need to see it again
- doc2 = self.make_document(doc1.doc_id, doc1.rev + '|fourth:1',
- '{}')
- self.assertEqual('conflicted',
- self.db._put_doc_if_newer(doc2, save_conflict=False,
- replica_uid='other', replica_gen=4,
- replica_trans_id='T-id4')[0])
- self.assertEqual(
- (4, 'T-id4'), self.db._get_replica_gen_and_trans_id('other'))
-
- def test__get_replica_gen_and_trans_id(self):
- self.assertEqual(
- (0, ''), self.db._get_replica_gen_and_trans_id('other-db'))
- self.db._set_replica_gen_and_trans_id('other-db', 2, 'T-transaction')
- self.assertEqual(
- (2, 'T-transaction'),
- self.db._get_replica_gen_and_trans_id('other-db'))
-
- def test_put_updates_transaction_log(self):
- doc = self.db.create_doc_from_json(simple_doc)
- self.assertTransactionLog([doc.doc_id], self.db)
- doc.set_json('{"something": "else"}')
- self.db.put_doc(doc)
- self.assertTransactionLog([doc.doc_id, doc.doc_id], self.db)
- last_trans_id = self.getLastTransId(self.db)
- self.assertEqual((2, last_trans_id, [(doc.doc_id, 2, last_trans_id)]),
- self.db.whats_changed())
-
- def test_delete_updates_transaction_log(self):
- doc = self.db.create_doc_from_json(simple_doc)
- db_gen, _, _ = self.db.whats_changed()
- self.db.delete_doc(doc)
- last_trans_id = self.getLastTransId(self.db)
- self.assertEqual((2, last_trans_id, [(doc.doc_id, 2, last_trans_id)]),
- self.db.whats_changed(db_gen))
-
- def test_whats_changed_initial_database(self):
- self.assertEqual((0, '', []), self.db.whats_changed())
-
- def test_whats_changed_returns_one_id_for_multiple_changes(self):
- doc = self.db.create_doc_from_json(simple_doc)
- doc.set_json('{"new": "contents"}')
- self.db.put_doc(doc)
- last_trans_id = self.getLastTransId(self.db)
- self.assertEqual((2, last_trans_id, [(doc.doc_id, 2, last_trans_id)]),
- self.db.whats_changed())
- self.assertEqual((2, last_trans_id, []), self.db.whats_changed(2))
-
- def test_whats_changed_returns_last_edits_ascending(self):
- doc = self.db.create_doc_from_json(simple_doc)
- doc1 = self.db.create_doc_from_json(simple_doc)
- doc.set_json('{"new": "contents"}')
- self.db.delete_doc(doc1)
- delete_trans_id = self.getLastTransId(self.db)
- self.db.put_doc(doc)
- put_trans_id = self.getLastTransId(self.db)
- self.assertEqual((4, put_trans_id,
- [(doc1.doc_id, 3, delete_trans_id),
- (doc.doc_id, 4, put_trans_id)]),
- self.db.whats_changed())
-
- def test_whats_changed_doesnt_include_old_gen(self):
- self.db.create_doc_from_json(simple_doc)
- self.db.create_doc_from_json(simple_doc)
- doc2 = self.db.create_doc_from_json(simple_doc)
- last_trans_id = self.getLastTransId(self.db)
- self.assertEqual((3, last_trans_id, [(doc2.doc_id, 3, last_trans_id)]),
- self.db.whats_changed(2))
-
-
-class LocalDatabaseValidateGenNTransIdTests(tests.DatabaseBaseTests):
-
- scenarios = tests.LOCAL_DATABASES_SCENARIOS + tests.C_DATABASE_SCENARIOS
-
- def test_validate_gen_and_trans_id(self):
- self.db.create_doc_from_json(simple_doc)
- gen, trans_id = self.db._get_generation_info()
- self.db.validate_gen_and_trans_id(gen, trans_id)
-
- def test_validate_gen_and_trans_id_invalid_txid(self):
- self.db.create_doc_from_json(simple_doc)
- gen, _ = self.db._get_generation_info()
- self.assertRaises(
- errors.InvalidTransactionId,
- self.db.validate_gen_and_trans_id, gen, 'wrong')
-
- def test_validate_gen_and_trans_id_invalid_gen(self):
- self.db.create_doc_from_json(simple_doc)
- gen, trans_id = self.db._get_generation_info()
- self.assertRaises(
- errors.InvalidGeneration,
- self.db.validate_gen_and_trans_id, gen + 1, trans_id)
-
-
-class LocalDatabaseValidateSourceGenTests(tests.DatabaseBaseTests):
-
- scenarios = tests.LOCAL_DATABASES_SCENARIOS + tests.C_DATABASE_SCENARIOS
-
- def test_validate_source_gen_and_trans_id_same(self):
- self.db._set_replica_gen_and_trans_id('other', 1, 'T-sid')
- self.db._validate_source('other', 1, 'T-sid')
-
- def test_validate_source_gen_newer(self):
- self.db._set_replica_gen_and_trans_id('other', 1, 'T-sid')
- self.db._validate_source('other', 2, 'T-whatevs')
-
- def test_validate_source_wrong_txid(self):
- self.db._set_replica_gen_and_trans_id('other', 1, 'T-sid')
- self.assertRaises(
- errors.InvalidTransactionId,
- self.db._validate_source, 'other', 1, 'T-sad')
-
-
-class LocalDatabaseWithConflictsTests(tests.DatabaseBaseTests):
- # test supporting/functionality around storing conflicts
-
- scenarios = tests.LOCAL_DATABASES_SCENARIOS + tests.C_DATABASE_SCENARIOS
-
- def test_get_docs_conflicted(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- doc2 = self.make_document(doc1.doc_id, 'alternate:1', nested_doc)
- self.db._put_doc_if_newer(
- doc2, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertEqual([doc2], list(self.db.get_docs([doc1.doc_id])))
-
- def test_get_docs_conflicts_ignored(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- doc2 = self.db.create_doc_from_json(nested_doc)
- alt_doc = self.make_document(doc1.doc_id, 'alternate:1', nested_doc)
- self.db._put_doc_if_newer(
- alt_doc, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- no_conflict_doc = self.make_document(doc1.doc_id, 'alternate:1',
- nested_doc)
- self.assertEqual([no_conflict_doc, doc2],
- list(self.db.get_docs([doc1.doc_id, doc2.doc_id],
- check_for_conflicts=False)))
-
- def test_get_doc_conflicts(self):
- doc = self.db.create_doc_from_json(simple_doc)
- alt_doc = self.make_document(doc.doc_id, 'alternate:1', nested_doc)
- self.db._put_doc_if_newer(
- alt_doc, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertEqual([alt_doc, doc],
- self.db.get_doc_conflicts(doc.doc_id))
-
- def test_get_all_docs_sees_conflicts(self):
- doc = self.db.create_doc_from_json(simple_doc)
- alt_doc = self.make_document(doc.doc_id, 'alternate:1', nested_doc)
- self.db._put_doc_if_newer(
- alt_doc, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- _, docs = self.db.get_all_docs()
- self.assertTrue(list(docs)[0].has_conflicts)
-
- def test_get_doc_conflicts_unconflicted(self):
- doc = self.db.create_doc_from_json(simple_doc)
- self.assertEqual([], self.db.get_doc_conflicts(doc.doc_id))
-
- def test_get_doc_conflicts_no_such_id(self):
- self.assertEqual([], self.db.get_doc_conflicts('doc-id'))
-
- def test_resolve_doc(self):
- doc = self.db.create_doc_from_json(simple_doc)
- alt_doc = self.make_document(doc.doc_id, 'alternate:1', nested_doc)
- self.db._put_doc_if_newer(
- alt_doc, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertGetDocConflicts(self.db, doc.doc_id,
- [('alternate:1', nested_doc), (doc.rev, simple_doc)])
- orig_rev = doc.rev
- self.db.resolve_doc(doc, [alt_doc.rev, doc.rev])
- self.assertNotEqual(orig_rev, doc.rev)
- self.assertFalse(doc.has_conflicts)
- self.assertGetDoc(self.db, doc.doc_id, doc.rev, simple_doc, False)
- self.assertGetDocConflicts(self.db, doc.doc_id, [])
-
- def test_resolve_doc_picks_biggest_vcr(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- doc2 = self.make_document(doc1.doc_id, 'alternate:1', nested_doc)
- self.db._put_doc_if_newer(
- doc2, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertGetDocConflicts(self.db, doc1.doc_id,
- [(doc2.rev, nested_doc),
- (doc1.rev, simple_doc)])
- orig_doc1_rev = doc1.rev
- self.db.resolve_doc(doc1, [doc2.rev, doc1.rev])
- self.assertFalse(doc1.has_conflicts)
- self.assertNotEqual(orig_doc1_rev, doc1.rev)
- self.assertGetDoc(self.db, doc1.doc_id, doc1.rev, simple_doc, False)
- self.assertGetDocConflicts(self.db, doc1.doc_id, [])
- vcr_1 = vectorclock.VectorClockRev(orig_doc1_rev)
- vcr_2 = vectorclock.VectorClockRev(doc2.rev)
- vcr_new = vectorclock.VectorClockRev(doc1.rev)
- self.assertTrue(vcr_new.is_newer(vcr_1))
- self.assertTrue(vcr_new.is_newer(vcr_2))
-
- def test_resolve_doc_partial_not_winning(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- doc2 = self.make_document(doc1.doc_id, 'alternate:1', nested_doc)
- self.db._put_doc_if_newer(
- doc2, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertGetDocConflicts(self.db, doc1.doc_id,
- [(doc2.rev, nested_doc),
- (doc1.rev, simple_doc)])
- content3 = '{"key": "valin3"}'
- doc3 = self.make_document(doc1.doc_id, 'third:1', content3)
- self.db._put_doc_if_newer(
- doc3, save_conflict=True, replica_uid='r', replica_gen=2,
- replica_trans_id='bar')
- self.assertGetDocConflicts(self.db, doc1.doc_id,
- [(doc3.rev, content3),
- (doc1.rev, simple_doc),
- (doc2.rev, nested_doc)])
- self.db.resolve_doc(doc1, [doc2.rev, doc1.rev])
- self.assertTrue(doc1.has_conflicts)
- self.assertGetDoc(self.db, doc1.doc_id, doc3.rev, content3, True)
- self.assertGetDocConflicts(self.db, doc1.doc_id,
- [(doc3.rev, content3),
- (doc1.rev, simple_doc)])
-
- def test_resolve_doc_partial_winning(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- doc2 = self.make_document(doc1.doc_id, 'alternate:1', nested_doc)
- self.db._put_doc_if_newer(
- doc2, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- content3 = '{"key": "valin3"}'
- doc3 = self.make_document(doc1.doc_id, 'third:1', content3)
- self.db._put_doc_if_newer(
- doc3, save_conflict=True, replica_uid='r', replica_gen=2,
- replica_trans_id='bar')
- self.assertGetDocConflicts(self.db, doc1.doc_id,
- [(doc3.rev, content3),
- (doc1.rev, simple_doc),
- (doc2.rev, nested_doc)])
- self.db.resolve_doc(doc1, [doc3.rev, doc1.rev])
- self.assertTrue(doc1.has_conflicts)
- self.assertGetDocConflicts(self.db, doc1.doc_id,
- [(doc1.rev, simple_doc),
- (doc2.rev, nested_doc)])
-
- def test_resolve_doc_with_delete_conflict(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- self.db.delete_doc(doc1)
- doc2 = self.make_document(doc1.doc_id, 'alternate:1', nested_doc)
- self.db._put_doc_if_newer(
- doc2, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertGetDocConflicts(self.db, doc1.doc_id,
- [(doc2.rev, nested_doc),
- (doc1.rev, None)])
- self.db.resolve_doc(doc2, [doc1.rev, doc2.rev])
- self.assertGetDocConflicts(self.db, doc1.doc_id, [])
- self.assertGetDoc(self.db, doc2.doc_id, doc2.rev, nested_doc, False)
-
- def test_resolve_doc_with_delete_to_delete(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- self.db.delete_doc(doc1)
- doc2 = self.make_document(doc1.doc_id, 'alternate:1', nested_doc)
- self.db._put_doc_if_newer(
- doc2, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertGetDocConflicts(self.db, doc1.doc_id,
- [(doc2.rev, nested_doc),
- (doc1.rev, None)])
- self.db.resolve_doc(doc1, [doc1.rev, doc2.rev])
- self.assertGetDocConflicts(self.db, doc1.doc_id, [])
- self.assertGetDocIncludeDeleted(
- self.db, doc1.doc_id, doc1.rev, None, False)
-
- def test_put_doc_if_newer_save_conflicted(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- # Document is inserted as a conflict
- doc2 = self.make_document(doc1.doc_id, 'alternate:1', nested_doc)
- state, _ = self.db._put_doc_if_newer(
- doc2, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertEqual('conflicted', state)
- # The database was updated
- self.assertGetDoc(self.db, doc1.doc_id, doc2.rev, nested_doc, True)
-
- def test_force_doc_conflict_supersedes_properly(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- doc2 = self.make_document(doc1.doc_id, 'alternate:1', '{"b": 1}')
- self.db._put_doc_if_newer(
- doc2, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- doc3 = self.make_document(doc1.doc_id, 'altalt:1', '{"c": 1}')
- self.db._put_doc_if_newer(
- doc3, save_conflict=True, replica_uid='r', replica_gen=2,
- replica_trans_id='bar')
- doc22 = self.make_document(doc1.doc_id, 'alternate:2', '{"b": 2}')
- self.db._put_doc_if_newer(
- doc22, save_conflict=True, replica_uid='r', replica_gen=3,
- replica_trans_id='zed')
- self.assertGetDocConflicts(self.db, doc1.doc_id,
- [('alternate:2', doc22.get_json()),
- ('altalt:1', doc3.get_json()),
- (doc1.rev, simple_doc)])
-
- def test_put_doc_if_newer_save_conflict_was_deleted(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- self.db.delete_doc(doc1)
- doc2 = self.make_document(doc1.doc_id, 'alternate:1', nested_doc)
- self.db._put_doc_if_newer(
- doc2, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertTrue(doc2.has_conflicts)
- self.assertGetDoc(
- self.db, doc1.doc_id, 'alternate:1', nested_doc, True)
- self.assertGetDocConflicts(self.db, doc1.doc_id,
- [('alternate:1', nested_doc), (doc1.rev, None)])
-
- def test_put_doc_if_newer_propagates_full_resolution(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- doc2 = self.make_document(doc1.doc_id, 'alternate:1', nested_doc)
- self.db._put_doc_if_newer(
- doc2, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- resolved_vcr = vectorclock.VectorClockRev(doc1.rev)
- vcr_2 = vectorclock.VectorClockRev(doc2.rev)
- resolved_vcr.maximize(vcr_2)
- resolved_vcr.increment('alternate')
- doc_resolved = self.make_document(doc1.doc_id, resolved_vcr.as_str(),
- '{"good": 1}')
- state, _ = self.db._put_doc_if_newer(
- doc_resolved, save_conflict=True, replica_uid='r', replica_gen=2,
- replica_trans_id='foo2')
- self.assertEqual('inserted', state)
- self.assertFalse(doc_resolved.has_conflicts)
- self.assertGetDocConflicts(self.db, doc1.doc_id, [])
- doc3 = self.db.get_doc(doc1.doc_id)
- self.assertFalse(doc3.has_conflicts)
-
- def test_put_doc_if_newer_propagates_partial_resolution(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- doc2 = self.make_document(doc1.doc_id, 'altalt:1', '{}')
- self.db._put_doc_if_newer(
- doc2, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- doc3 = self.make_document(doc1.doc_id, 'alternate:1', nested_doc)
- self.db._put_doc_if_newer(
- doc3, save_conflict=True, replica_uid='r', replica_gen=2,
- replica_trans_id='foo2')
- self.assertGetDocConflicts(self.db, doc1.doc_id,
- [('alternate:1', nested_doc), ('test:1', simple_doc),
- ('altalt:1', '{}')])
- resolved_vcr = vectorclock.VectorClockRev(doc1.rev)
- vcr_3 = vectorclock.VectorClockRev(doc3.rev)
- resolved_vcr.maximize(vcr_3)
- resolved_vcr.increment('alternate')
- doc_resolved = self.make_document(doc1.doc_id, resolved_vcr.as_str(),
- '{"good": 1}')
- state, _ = self.db._put_doc_if_newer(
- doc_resolved, save_conflict=True, replica_uid='r', replica_gen=3,
- replica_trans_id='foo3')
- self.assertEqual('inserted', state)
- self.assertTrue(doc_resolved.has_conflicts)
- doc4 = self.db.get_doc(doc1.doc_id)
- self.assertTrue(doc4.has_conflicts)
- self.assertGetDocConflicts(self.db, doc1.doc_id,
- [('alternate:2|test:1', '{"good": 1}'), ('altalt:1', '{}')])
-
- def test_put_doc_if_newer_replica_uid(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- self.db._set_replica_gen_and_trans_id('other', 1, 'T-id')
- doc2 = self.make_document(doc1.doc_id, doc1.rev + '|other:1',
- nested_doc)
- self.db._put_doc_if_newer(doc2, save_conflict=True,
- replica_uid='other', replica_gen=2,
- replica_trans_id='T-id2')
- # Conflict vs the current update
- doc2 = self.make_document(doc1.doc_id, doc1.rev + '|third:3',
- '{}')
- self.assertEqual('conflicted',
- self.db._put_doc_if_newer(doc2, save_conflict=True,
- replica_uid='other', replica_gen=3,
- replica_trans_id='T-id3')[0])
- self.assertEqual(
- (3, 'T-id3'), self.db._get_replica_gen_and_trans_id('other'))
-
- def test_put_doc_if_newer_autoresolve_2(self):
- # this is an ordering variant of _3, but that already works
- # adding the test explicitly to catch the regression easily
- doc_a1 = self.db.create_doc_from_json(simple_doc)
- doc_a2 = self.make_document(doc_a1.doc_id, 'test:2', "{}")
- doc_a1b1 = self.make_document(doc_a1.doc_id, 'test:1|other:1',
- '{"a":"42"}')
- doc_a3 = self.make_document(doc_a1.doc_id, 'test:2|other:1', "{}")
- state, _ = self.db._put_doc_if_newer(
- doc_a2, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertEqual(state, 'inserted')
- state, _ = self.db._put_doc_if_newer(
- doc_a1b1, save_conflict=True, replica_uid='r', replica_gen=2,
- replica_trans_id='foo2')
- self.assertEqual(state, 'conflicted')
- state, _ = self.db._put_doc_if_newer(
- doc_a3, save_conflict=True, replica_uid='r', replica_gen=3,
- replica_trans_id='foo3')
- self.assertEqual(state, 'inserted')
- self.assertFalse(self.db.get_doc(doc_a1.doc_id).has_conflicts)
-
- def test_put_doc_if_newer_autoresolve_3(self):
- doc_a1 = self.db.create_doc_from_json(simple_doc)
- doc_a1b1 = self.make_document(doc_a1.doc_id, 'test:1|other:1', "{}")
- doc_a2 = self.make_document(doc_a1.doc_id, 'test:2', '{"a":"42"}')
- doc_a3 = self.make_document(doc_a1.doc_id, 'test:3', "{}")
- state, _ = self.db._put_doc_if_newer(
- doc_a1b1, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertEqual(state, 'inserted')
- state, _ = self.db._put_doc_if_newer(
- doc_a2, save_conflict=True, replica_uid='r', replica_gen=2,
- replica_trans_id='foo2')
- self.assertEqual(state, 'conflicted')
- state, _ = self.db._put_doc_if_newer(
- doc_a3, save_conflict=True, replica_uid='r', replica_gen=3,
- replica_trans_id='foo3')
- self.assertEqual(state, 'superseded')
- doc = self.db.get_doc(doc_a1.doc_id, True)
- self.assertFalse(doc.has_conflicts)
- rev = vectorclock.VectorClockRev(doc.rev)
- rev_a3 = vectorclock.VectorClockRev('test:3')
- rev_a1b1 = vectorclock.VectorClockRev('test:1|other:1')
- self.assertTrue(rev.is_newer(rev_a3))
- self.assertTrue('test:4' in doc.rev) # locally increased
- self.assertTrue(rev.is_newer(rev_a1b1))
-
- def test_put_doc_if_newer_autoresolve_4(self):
- doc_a1 = self.db.create_doc_from_json(simple_doc)
- doc_a1b1 = self.make_document(doc_a1.doc_id, 'test:1|other:1', None)
- doc_a2 = self.make_document(doc_a1.doc_id, 'test:2', '{"a":"42"}')
- doc_a3 = self.make_document(doc_a1.doc_id, 'test:3', None)
- state, _ = self.db._put_doc_if_newer(
- doc_a1b1, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertEqual(state, 'inserted')
- state, _ = self.db._put_doc_if_newer(
- doc_a2, save_conflict=True, replica_uid='r', replica_gen=2,
- replica_trans_id='foo2')
- self.assertEqual(state, 'conflicted')
- state, _ = self.db._put_doc_if_newer(
- doc_a3, save_conflict=True, replica_uid='r', replica_gen=3,
- replica_trans_id='foo3')
- self.assertEqual(state, 'superseded')
- doc = self.db.get_doc(doc_a1.doc_id, True)
- self.assertFalse(doc.has_conflicts)
- rev = vectorclock.VectorClockRev(doc.rev)
- rev_a3 = vectorclock.VectorClockRev('test:3')
- rev_a1b1 = vectorclock.VectorClockRev('test:1|other:1')
- self.assertTrue(rev.is_newer(rev_a3))
- self.assertTrue('test:4' in doc.rev) # locally increased
- self.assertTrue(rev.is_newer(rev_a1b1))
-
- def test_put_refuses_to_update_conflicted(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- content2 = '{"key": "altval"}'
- doc2 = self.make_document(doc1.doc_id, 'altrev:1', content2)
- self.db._put_doc_if_newer(
- doc2, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertGetDoc(self.db, doc1.doc_id, doc2.rev, content2, True)
- content3 = '{"key": "local"}'
- doc2.set_json(content3)
- self.assertRaises(errors.ConflictedDoc, self.db.put_doc, doc2)
-
- def test_delete_refuses_for_conflicted(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- doc2 = self.make_document(doc1.doc_id, 'altrev:1', nested_doc)
- self.db._put_doc_if_newer(
- doc2, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertGetDoc(self.db, doc2.doc_id, doc2.rev, nested_doc, True)
- self.assertRaises(errors.ConflictedDoc, self.db.delete_doc, doc2)
-
-
-class DatabaseIndexTests(tests.DatabaseBaseTests):
-
- scenarios = tests.LOCAL_DATABASES_SCENARIOS + tests.C_DATABASE_SCENARIOS
-
- def assertParseError(self, definition):
- self.db.create_doc_from_json(nested_doc)
- self.assertRaises(
- errors.IndexDefinitionParseError, self.db.create_index, 'idx',
- definition)
-
- def assertIndexCreatable(self, definition):
- name = "idx"
- self.db.create_doc_from_json(nested_doc)
- self.db.create_index(name, definition)
- self.assertEqual(
- [(name, [definition])], self.db.list_indexes())
-
- def test_create_index(self):
- self.db.create_index('test-idx', 'name')
- self.assertEqual([('test-idx', ['name'])],
- self.db.list_indexes())
-
- def test_create_index_on_non_ascii_field_name(self):
- doc = self.db.create_doc_from_json(json.dumps({u'\xe5': 'value'}))
- self.db.create_index('test-idx', u'\xe5')
- self.assertEqual([doc], self.db.get_from_index('test-idx', 'value'))
-
- def test_list_indexes_with_non_ascii_field_names(self):
- self.db.create_index('test-idx', u'\xe5')
- self.assertEqual(
- [('test-idx', [u'\xe5'])], self.db.list_indexes())
-
- def test_create_index_evaluates_it(self):
- doc = self.db.create_doc_from_json(simple_doc)
- self.db.create_index('test-idx', 'key')
- self.assertEqual([doc], self.db.get_from_index('test-idx', 'value'))
-
- def test_wildcard_matches_unicode_value(self):
- doc = self.db.create_doc_from_json(json.dumps({"key": u"valu\xe5"}))
- self.db.create_index('test-idx', 'key')
- self.assertEqual([doc], self.db.get_from_index('test-idx', '*'))
-
- def test_retrieve_unicode_value_from_index(self):
- doc = self.db.create_doc_from_json(json.dumps({"key": u"valu\xe5"}))
- self.db.create_index('test-idx', 'key')
- self.assertEqual(
- [doc], self.db.get_from_index('test-idx', u"valu\xe5"))
-
- def test_create_index_fails_if_name_taken(self):
- self.db.create_index('test-idx', 'key')
- self.assertRaises(errors.IndexNameTakenError,
- self.db.create_index,
- 'test-idx', 'stuff')
-
- def test_create_index_does_not_fail_if_name_taken_with_same_index(self):
- self.db.create_index('test-idx', 'key')
- self.db.create_index('test-idx', 'key')
- self.assertEqual([('test-idx', ['key'])], self.db.list_indexes())
-
- def test_create_index_does_not_duplicate_indexed_fields(self):
- self.db.create_doc_from_json(simple_doc)
- self.db.create_index('test-idx', 'key')
- self.db.delete_index('test-idx')
- self.db.create_index('test-idx', 'key')
- self.assertEqual(1, len(self.db.get_from_index('test-idx', 'value')))
-
- def test_delete_index_does_not_remove_fields_from_other_indexes(self):
- self.db.create_doc_from_json(simple_doc)
- self.db.create_index('test-idx', 'key')
- self.db.create_index('test-idx2', 'key')
- self.db.delete_index('test-idx')
- self.assertEqual(1, len(self.db.get_from_index('test-idx2', 'value')))
-
- def test_create_index_after_deleting_document(self):
- doc = self.db.create_doc_from_json(simple_doc)
- doc2 = self.db.create_doc_from_json(simple_doc)
- self.db.delete_doc(doc2)
- self.db.create_index('test-idx', 'key')
- self.assertEqual([doc], self.db.get_from_index('test-idx', 'value'))
-
- def test_delete_index(self):
- self.db.create_index('test-idx', 'key')
- self.assertEqual([('test-idx', ['key'])], self.db.list_indexes())
- self.db.delete_index('test-idx')
- self.assertEqual([], self.db.list_indexes())
-
- def test_create_adds_to_index(self):
- self.db.create_index('test-idx', 'key')
- doc = self.db.create_doc_from_json(simple_doc)
- self.assertEqual([doc], self.db.get_from_index('test-idx', 'value'))
-
- def test_get_from_index_unmatched(self):
- self.db.create_doc_from_json(simple_doc)
- self.db.create_index('test-idx', 'key')
- self.assertEqual([], self.db.get_from_index('test-idx', 'novalue'))
-
- def test_create_index_multiple_exact_matches(self):
- doc = self.db.create_doc_from_json(simple_doc)
- doc2 = self.db.create_doc_from_json(simple_doc)
- self.db.create_index('test-idx', 'key')
- self.assertEqual(
- sorted([doc, doc2]),
- sorted(self.db.get_from_index('test-idx', 'value')))
-
- def test_get_from_index(self):
- doc = self.db.create_doc_from_json(simple_doc)
- self.db.create_index('test-idx', 'key')
- self.assertEqual([doc], self.db.get_from_index('test-idx', 'value'))
-
- def test_get_from_index_multi(self):
- content = '{"key": "value", "key2": "value2"}'
- doc = self.db.create_doc_from_json(content)
- self.db.create_index('test-idx', 'key', 'key2')
- self.assertEqual(
- [doc], self.db.get_from_index('test-idx', 'value', 'value2'))
-
- def test_get_from_index_multi_list(self):
- doc = self.db.create_doc_from_json(
- '{"key": "value", "key2": ["value2-1", "value2-2", "value2-3"]}')
- self.db.create_index('test-idx', 'key', 'key2')
- self.assertEqual(
- [doc], self.db.get_from_index('test-idx', 'value', 'value2-1'))
- self.assertEqual(
- [doc], self.db.get_from_index('test-idx', 'value', 'value2-2'))
- self.assertEqual(
- [doc], self.db.get_from_index('test-idx', 'value', 'value2-3'))
- self.assertEqual(
- [('value', 'value2-1'), ('value', 'value2-2'),
- ('value', 'value2-3')],
- sorted(self.db.get_index_keys('test-idx')))
-
- def test_get_from_index_sees_conflicts(self):
- doc = self.db.create_doc_from_json(simple_doc)
- self.db.create_index('test-idx', 'key', 'key2')
- alt_doc = self.make_document(
- doc.doc_id, 'alternate:1',
- '{"key": "value", "key2": ["value2-1", "value2-2", "value2-3"]}')
- self.db._put_doc_if_newer(
- alt_doc, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- docs = self.db.get_from_index('test-idx', 'value', 'value2-1')
- self.assertTrue(docs[0].has_conflicts)
-
- def test_get_index_keys_multi_list_list(self):
- self.db.create_doc_from_json(
- '{"key": "value1-1 value1-2 value1-3", '
- '"key2": ["value2-1", "value2-2", "value2-3"]}')
- self.db.create_index('test-idx', 'split_words(key)', 'key2')
- self.assertEqual(
- [(u'value1-1', u'value2-1'), (u'value1-1', u'value2-2'),
- (u'value1-1', u'value2-3'), (u'value1-2', u'value2-1'),
- (u'value1-2', u'value2-2'), (u'value1-2', u'value2-3'),
- (u'value1-3', u'value2-1'), (u'value1-3', u'value2-2'),
- (u'value1-3', u'value2-3')],
- sorted(self.db.get_index_keys('test-idx')))
-
- def test_get_from_index_multi_ordered(self):
- doc1 = self.db.create_doc_from_json(
- '{"key": "value3", "key2": "value4"}')
- doc2 = self.db.create_doc_from_json(
- '{"key": "value2", "key2": "value3"}')
- doc3 = self.db.create_doc_from_json(
- '{"key": "value2", "key2": "value2"}')
- doc4 = self.db.create_doc_from_json(
- '{"key": "value1", "key2": "value1"}')
- self.db.create_index('test-idx', 'key', 'key2')
- self.assertEqual(
- [doc4, doc3, doc2, doc1],
- self.db.get_from_index('test-idx', 'v*', '*'))
-
- def test_get_range_from_index_start_end(self):
- doc1 = self.db.create_doc_from_json('{"key": "value3"}')
- doc2 = self.db.create_doc_from_json('{"key": "value2"}')
- self.db.create_doc_from_json('{"key": "value4"}')
- self.db.create_doc_from_json('{"key": "value1"}')
- self.db.create_index('test-idx', 'key')
- self.assertEqual(
- [doc2, doc1],
- self.db.get_range_from_index('test-idx', 'value2', 'value3'))
-
- def test_get_range_from_index_start(self):
- doc1 = self.db.create_doc_from_json('{"key": "value3"}')
- doc2 = self.db.create_doc_from_json('{"key": "value2"}')
- doc3 = self.db.create_doc_from_json('{"key": "value4"}')
- self.db.create_doc_from_json('{"key": "value1"}')
- self.db.create_index('test-idx', 'key')
- self.assertEqual(
- [doc2, doc1, doc3],
- self.db.get_range_from_index('test-idx', 'value2'))
-
- def test_get_range_from_index_sees_conflicts(self):
- doc = self.db.create_doc_from_json(simple_doc)
- self.db.create_index('test-idx', 'key')
- alt_doc = self.make_document(
- doc.doc_id, 'alternate:1', '{"key": "valuedepalue"}')
- self.db._put_doc_if_newer(
- alt_doc, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- docs = self.db.get_range_from_index('test-idx', 'a')
- self.assertTrue(docs[0].has_conflicts)
-
- def test_get_range_from_index_end(self):
- self.db.create_doc_from_json('{"key": "value3"}')
- doc2 = self.db.create_doc_from_json('{"key": "value2"}')
- self.db.create_doc_from_json('{"key": "value4"}')
- doc4 = self.db.create_doc_from_json('{"key": "value1"}')
- self.db.create_index('test-idx', 'key')
- self.assertEqual(
- [doc4, doc2],
- self.db.get_range_from_index('test-idx', None, 'value2'))
-
- def test_get_wildcard_range_from_index_start(self):
- doc1 = self.db.create_doc_from_json('{"key": "value4"}')
- doc2 = self.db.create_doc_from_json('{"key": "value23"}')
- doc3 = self.db.create_doc_from_json('{"key": "value2"}')
- doc4 = self.db.create_doc_from_json('{"key": "value22"}')
- self.db.create_doc_from_json('{"key": "value1"}')
- self.db.create_index('test-idx', 'key')
- self.assertEqual(
- [doc3, doc4, doc2, doc1],
- self.db.get_range_from_index('test-idx', 'value2*'))
-
- def test_get_wildcard_range_from_index_end(self):
- self.db.create_doc_from_json('{"key": "value4"}')
- doc2 = self.db.create_doc_from_json('{"key": "value23"}')
- doc3 = self.db.create_doc_from_json('{"key": "value2"}')
- doc4 = self.db.create_doc_from_json('{"key": "value22"}')
- doc5 = self.db.create_doc_from_json('{"key": "value1"}')
- self.db.create_index('test-idx', 'key')
- self.assertEqual(
- [doc5, doc3, doc4, doc2],
- self.db.get_range_from_index('test-idx', None, 'value2*'))
-
- def test_get_wildcard_range_from_index_start_end(self):
- self.db.create_doc_from_json('{"key": "a"}')
- self.db.create_doc_from_json('{"key": "boo3"}')
- doc3 = self.db.create_doc_from_json('{"key": "catalyst"}')
- doc4 = self.db.create_doc_from_json('{"key": "whaever"}')
- self.db.create_doc_from_json('{"key": "zerg"}')
- self.db.create_index('test-idx', 'key')
- self.assertEqual(
- [doc3, doc4],
- self.db.get_range_from_index('test-idx', 'cat*', 'zap*'))
-
- def test_get_range_from_index_multi_column_start_end(self):
- self.db.create_doc_from_json('{"key": "value3", "key2": "value4"}')
- doc2 = self.db.create_doc_from_json(
- '{"key": "value2", "key2": "value3"}')
- doc3 = self.db.create_doc_from_json(
- '{"key": "value2", "key2": "value2"}')
- self.db.create_doc_from_json('{"key": "value1", "key2": "value1"}')
- self.db.create_index('test-idx', 'key', 'key2')
- self.assertEqual(
- [doc3, doc2],
- self.db.get_range_from_index(
- 'test-idx', ('value2', 'value2'), ('value2', 'value3')))
-
- def test_get_range_from_index_multi_column_start(self):
- doc1 = self.db.create_doc_from_json(
- '{"key": "value3", "key2": "value4"}')
- doc2 = self.db.create_doc_from_json(
- '{"key": "value2", "key2": "value3"}')
- self.db.create_doc_from_json('{"key": "value2", "key2": "value2"}')
- self.db.create_doc_from_json('{"key": "value1", "key2": "value1"}')
- self.db.create_index('test-idx', 'key', 'key2')
- self.assertEqual(
- [doc2, doc1],
- self.db.get_range_from_index('test-idx', ('value2', 'value3')))
-
- def test_get_range_from_index_multi_column_end(self):
- self.db.create_doc_from_json('{"key": "value3", "key2": "value4"}')
- doc2 = self.db.create_doc_from_json(
- '{"key": "value2", "key2": "value3"}')
- doc3 = self.db.create_doc_from_json(
- '{"key": "value2", "key2": "value2"}')
- doc4 = self.db.create_doc_from_json(
- '{"key": "value1", "key2": "value1"}')
- self.db.create_index('test-idx', 'key', 'key2')
- self.assertEqual(
- [doc4, doc3, doc2],
- self.db.get_range_from_index(
- 'test-idx', None, ('value2', 'value3')))
-
- def test_get_wildcard_range_from_index_multi_column_start(self):
- doc1 = self.db.create_doc_from_json(
- '{"key": "value3", "key2": "value4"}')
- doc2 = self.db.create_doc_from_json(
- '{"key": "value2", "key2": "value23"}')
- doc3 = self.db.create_doc_from_json(
- '{"key": "value2", "key2": "value2"}')
- self.db.create_doc_from_json('{"key": "value1", "key2": "value1"}')
- self.db.create_index('test-idx', 'key', 'key2')
- self.assertEqual(
- [doc3, doc2, doc1],
- self.db.get_range_from_index('test-idx', ('value2', 'value2*')))
-
- def test_get_wildcard_range_from_index_multi_column_end(self):
- self.db.create_doc_from_json('{"key": "value3", "key2": "value4"}')
- doc2 = self.db.create_doc_from_json(
- '{"key": "value2", "key2": "value23"}')
- doc3 = self.db.create_doc_from_json(
- '{"key": "value2", "key2": "value2"}')
- doc4 = self.db.create_doc_from_json(
- '{"key": "value1", "key2": "value1"}')
- self.db.create_index('test-idx', 'key', 'key2')
- self.assertEqual(
- [doc4, doc3, doc2],
- self.db.get_range_from_index(
- 'test-idx', None, ('value2', 'value2*')))
-
- def test_get_glob_range_from_index_multi_column_start(self):
- doc1 = self.db.create_doc_from_json(
- '{"key": "value3", "key2": "value4"}')
- doc2 = self.db.create_doc_from_json(
- '{"key": "value2", "key2": "value23"}')
- self.db.create_doc_from_json('{"key": "value1", "key2": "value2"}')
- self.db.create_doc_from_json('{"key": "value1", "key2": "value1"}')
- self.db.create_index('test-idx', 'key', 'key2')
- self.assertEqual(
- [doc2, doc1],
- self.db.get_range_from_index('test-idx', ('value2', '*')))
-
- def test_get_glob_range_from_index_multi_column_end(self):
- self.db.create_doc_from_json('{"key": "value3", "key2": "value4"}')
- doc2 = self.db.create_doc_from_json(
- '{"key": "value2", "key2": "value23"}')
- doc3 = self.db.create_doc_from_json(
- '{"key": "value1", "key2": "value2"}')
- doc4 = self.db.create_doc_from_json(
- '{"key": "value1", "key2": "value1"}')
- self.db.create_index('test-idx', 'key', 'key2')
- self.assertEqual(
- [doc4, doc3, doc2],
- self.db.get_range_from_index('test-idx', None, ('value2', '*')))
-
- def test_get_range_from_index_illegal_wildcard_order(self):
- self.db.create_index('test-idx', 'k1', 'k2')
- self.assertRaises(
- errors.InvalidGlobbing,
- self.db.get_range_from_index, 'test-idx', ('*', 'v2'))
-
- def test_get_range_from_index_illegal_glob_after_wildcard(self):
- self.db.create_index('test-idx', 'k1', 'k2')
- self.assertRaises(
- errors.InvalidGlobbing,
- self.db.get_range_from_index, 'test-idx', ('*', 'v*'))
-
- def test_get_range_from_index_illegal_wildcard_order_end(self):
- self.db.create_index('test-idx', 'k1', 'k2')
- self.assertRaises(
- errors.InvalidGlobbing,
- self.db.get_range_from_index, 'test-idx', None, ('*', 'v2'))
-
- def test_get_range_from_index_illegal_glob_after_wildcard_end(self):
- self.db.create_index('test-idx', 'k1', 'k2')
- self.assertRaises(
- errors.InvalidGlobbing,
- self.db.get_range_from_index, 'test-idx', None, ('*', 'v*'))
-
- def test_get_from_index_fails_if_no_index(self):
- self.assertRaises(
- errors.IndexDoesNotExist, self.db.get_from_index, 'foo')
-
- def test_get_index_keys_fails_if_no_index(self):
- self.assertRaises(errors.IndexDoesNotExist,
- self.db.get_index_keys,
- 'foo')
-
- def test_get_index_keys_works_if_no_docs(self):
- self.db.create_index('test-idx', 'key')
- self.assertEqual([], self.db.get_index_keys('test-idx'))
-
- def test_put_updates_index(self):
- doc = self.db.create_doc_from_json(simple_doc)
- self.db.create_index('test-idx', 'key')
- new_content = '{"key": "altval"}'
- doc.set_json(new_content)
- self.db.put_doc(doc)
- self.assertEqual([], self.db.get_from_index('test-idx', 'value'))
- self.assertEqual([doc], self.db.get_from_index('test-idx', 'altval'))
-
- def test_delete_updates_index(self):
- doc = self.db.create_doc_from_json(simple_doc)
- doc2 = self.db.create_doc_from_json(simple_doc)
- self.db.create_index('test-idx', 'key')
- self.assertEqual(
- sorted([doc, doc2]),
- sorted(self.db.get_from_index('test-idx', 'value')))
- self.db.delete_doc(doc)
- self.assertEqual([doc2], self.db.get_from_index('test-idx', 'value'))
-
- def test_get_from_index_illegal_number_of_entries(self):
- self.db.create_index('test-idx', 'k1', 'k2')
- self.assertRaises(
- errors.InvalidValueForIndex, self.db.get_from_index, 'test-idx')
- self.assertRaises(
- errors.InvalidValueForIndex,
- self.db.get_from_index, 'test-idx', 'v1')
- self.assertRaises(
- errors.InvalidValueForIndex,
- self.db.get_from_index, 'test-idx', 'v1', 'v2', 'v3')
-
- def test_get_from_index_illegal_wildcard_order(self):
- self.db.create_index('test-idx', 'k1', 'k2')
- self.assertRaises(
- errors.InvalidGlobbing,
- self.db.get_from_index, 'test-idx', '*', 'v2')
-
- def test_get_from_index_illegal_glob_after_wildcard(self):
- self.db.create_index('test-idx', 'k1', 'k2')
- self.assertRaises(
- errors.InvalidGlobbing,
- self.db.get_from_index, 'test-idx', '*', 'v*')
-
- def test_get_all_from_index(self):
- self.db.create_index('test-idx', 'key')
- doc1 = self.db.create_doc_from_json(simple_doc)
- doc2 = self.db.create_doc_from_json(nested_doc)
- # This one should not be in the index
- self.db.create_doc_from_json('{"no": "key"}')
- diff_value_doc = '{"key": "diff value"}'
- doc4 = self.db.create_doc_from_json(diff_value_doc)
- # This is essentially a 'prefix' match, but we match every entry.
- self.assertEqual(
- sorted([doc1, doc2, doc4]),
- sorted(self.db.get_from_index('test-idx', '*')))
-
- def test_get_all_from_index_ordered(self):
- self.db.create_index('test-idx', 'key')
- doc1 = self.db.create_doc_from_json('{"key": "value x"}')
- doc2 = self.db.create_doc_from_json('{"key": "value b"}')
- doc3 = self.db.create_doc_from_json('{"key": "value a"}')
- doc4 = self.db.create_doc_from_json('{"key": "value m"}')
- # This is essentially a 'prefix' match, but we match every entry.
- self.assertEqual(
- [doc3, doc2, doc4, doc1], self.db.get_from_index('test-idx', '*'))
-
- def test_put_updates_when_adding_key(self):
- doc = self.db.create_doc_from_json("{}")
- self.db.create_index('test-idx', 'key')
- self.assertEqual([], self.db.get_from_index('test-idx', '*'))
- doc.set_json(simple_doc)
- self.db.put_doc(doc)
- self.assertEqual([doc], self.db.get_from_index('test-idx', '*'))
-
- def test_get_from_index_empty_string(self):
- self.db.create_index('test-idx', 'key')
- doc1 = self.db.create_doc_from_json(simple_doc)
- content2 = '{"key": ""}'
- doc2 = self.db.create_doc_from_json(content2)
- self.assertEqual([doc2], self.db.get_from_index('test-idx', ''))
- # Empty string matches the wildcard.
- self.assertEqual(
- sorted([doc1, doc2]),
- sorted(self.db.get_from_index('test-idx', '*')))
-
- def test_get_from_index_not_null(self):
- self.db.create_index('test-idx', 'key')
- doc1 = self.db.create_doc_from_json(simple_doc)
- self.db.create_doc_from_json('{"key": null}')
- self.assertEqual([doc1], self.db.get_from_index('test-idx', '*'))
-
- def test_get_partial_from_index(self):
- content1 = '{"k1": "v1", "k2": "v2"}'
- content2 = '{"k1": "v1", "k2": "x2"}'
- content3 = '{"k1": "v1", "k2": "y2"}'
- # doc4 has a different k1 value, so it doesn't match the prefix.
- content4 = '{"k1": "NN", "k2": "v2"}'
- doc1 = self.db.create_doc_from_json(content1)
- doc2 = self.db.create_doc_from_json(content2)
- doc3 = self.db.create_doc_from_json(content3)
- self.db.create_doc_from_json(content4)
- self.db.create_index('test-idx', 'k1', 'k2')
- self.assertEqual(
- sorted([doc1, doc2, doc3]),
- sorted(self.db.get_from_index('test-idx', "v1", "*")))
-
- def test_get_glob_match(self):
- # Note: the exact glob syntax is probably subject to change
- content1 = '{"k1": "v1", "k2": "v1"}'
- content2 = '{"k1": "v1", "k2": "v2"}'
- content3 = '{"k1": "v1", "k2": "v3"}'
- # doc4 has a different k2 prefix value, so it doesn't match
- content4 = '{"k1": "v1", "k2": "ZZ"}'
- self.db.create_index('test-idx', 'k1', 'k2')
- doc1 = self.db.create_doc_from_json(content1)
- doc2 = self.db.create_doc_from_json(content2)
- doc3 = self.db.create_doc_from_json(content3)
- self.db.create_doc_from_json(content4)
- self.assertEqual(
- sorted([doc1, doc2, doc3]),
- sorted(self.db.get_from_index('test-idx', "v1", "v*")))
-
- def test_nested_index(self):
- doc = self.db.create_doc_from_json(nested_doc)
- self.db.create_index('test-idx', 'sub.doc')
- self.assertEqual(
- [doc], self.db.get_from_index('test-idx', 'underneath'))
- doc2 = self.db.create_doc_from_json(nested_doc)
- self.assertEqual(
- sorted([doc, doc2]),
- sorted(self.db.get_from_index('test-idx', 'underneath')))
-
- def test_nested_nonexistent(self):
- self.db.create_doc_from_json(nested_doc)
- # sub exists, but sub.foo does not:
- self.db.create_index('test-idx', 'sub.foo')
- self.assertEqual([], self.db.get_from_index('test-idx', '*'))
-
- def test_nested_nonexistent2(self):
- self.db.create_doc_from_json(nested_doc)
- self.db.create_index('test-idx', 'sub.foo.bar.baz.qux.fnord')
- self.assertEqual([], self.db.get_from_index('test-idx', '*'))
-
- def test_nested_traverses_lists(self):
- # subpath finds dicts in list
- doc = self.db.create_doc_from_json(
- '{"foo": [{"zap": "bar"}, {"zap": "baz"}]}')
- # subpath only finds dicts in list
- self.db.create_doc_from_json('{"foo": ["zap", "baz"]}')
- self.db.create_index('test-idx', 'foo.zap')
- self.assertEqual([doc], self.db.get_from_index('test-idx', 'bar'))
- self.assertEqual([doc], self.db.get_from_index('test-idx', 'baz'))
-
- def test_nested_list_traversal(self):
- # subpath finds dicts in list
- doc = self.db.create_doc_from_json(
- '{"foo": [{"zap": [{"qux": "fnord"}, {"qux": "zombo"}]},'
- '{"zap": "baz"}]}')
- # subpath only finds dicts in list
- self.db.create_index('test-idx', 'foo.zap.qux')
- self.assertEqual([doc], self.db.get_from_index('test-idx', 'fnord'))
- self.assertEqual([doc], self.db.get_from_index('test-idx', 'zombo'))
-
- def test_index_list1(self):
- self.db.create_index("index", "name")
- content = '{"name": ["foo", "bar"]}'
- doc = self.db.create_doc_from_json(content)
- rows = self.db.get_from_index("index", "bar")
- self.assertEqual([doc], rows)
-
- def test_index_list2(self):
- self.db.create_index("index", "name")
- content = '{"name": ["foo", "bar"]}'
- doc = self.db.create_doc_from_json(content)
- rows = self.db.get_from_index("index", "foo")
- self.assertEqual([doc], rows)
-
- def test_get_from_index_case_sensitive(self):
- self.db.create_index('test-idx', 'key')
- doc1 = self.db.create_doc_from_json(simple_doc)
- self.assertEqual([], self.db.get_from_index('test-idx', 'V*'))
- self.assertEqual([doc1], self.db.get_from_index('test-idx', 'v*'))
-
- def test_get_from_index_illegal_glob_before_value(self):
- self.db.create_index('test-idx', 'k1', 'k2')
- self.assertRaises(
- errors.InvalidGlobbing,
- self.db.get_from_index, 'test-idx', 'v*', 'v2')
-
- def test_get_from_index_illegal_glob_after_glob(self):
- self.db.create_index('test-idx', 'k1', 'k2')
- self.assertRaises(
- errors.InvalidGlobbing,
- self.db.get_from_index, 'test-idx', 'v*', 'v*')
-
- def test_get_from_index_with_sql_wildcards(self):
- self.db.create_index('test-idx', 'key')
- content1 = '{"key": "va%lue"}'
- content2 = '{"key": "value"}'
- content3 = '{"key": "va_lue"}'
- doc1 = self.db.create_doc_from_json(content1)
- self.db.create_doc_from_json(content2)
- doc3 = self.db.create_doc_from_json(content3)
- # The '%' in the search should be treated literally, not as a sql
- # globbing character.
- self.assertEqual([doc1], self.db.get_from_index('test-idx', 'va%*'))
- # Same for '_'
- self.assertEqual([doc3], self.db.get_from_index('test-idx', 'va_*'))
-
- def test_get_from_index_with_lower(self):
- self.db.create_index("index", "lower(name)")
- content = '{"name": "Foo"}'
- doc = self.db.create_doc_from_json(content)
- rows = self.db.get_from_index("index", "foo")
- self.assertEqual([doc], rows)
-
- def test_get_from_index_with_lower_matches_same_case(self):
- self.db.create_index("index", "lower(name)")
- content = '{"name": "foo"}'
- doc = self.db.create_doc_from_json(content)
- rows = self.db.get_from_index("index", "foo")
- self.assertEqual([doc], rows)
-
- def test_index_lower_doesnt_match_different_case(self):
- self.db.create_index("index", "lower(name)")
- content = '{"name": "Foo"}'
- self.db.create_doc_from_json(content)
- rows = self.db.get_from_index("index", "Foo")
- self.assertEqual([], rows)
-
- def test_index_lower_doesnt_match_other_index(self):
- self.db.create_index("index", "lower(name)")
- self.db.create_index("other_index", "name")
- content = '{"name": "Foo"}'
- self.db.create_doc_from_json(content)
- rows = self.db.get_from_index("index", "Foo")
- self.assertEqual(0, len(rows))
-
- def test_index_split_words_match_first(self):
- self.db.create_index("index", "split_words(name)")
- content = '{"name": "foo bar"}'
- doc = self.db.create_doc_from_json(content)
- rows = self.db.get_from_index("index", "foo")
- self.assertEqual([doc], rows)
-
- def test_index_split_words_match_second(self):
- self.db.create_index("index", "split_words(name)")
- content = '{"name": "foo bar"}'
- doc = self.db.create_doc_from_json(content)
- rows = self.db.get_from_index("index", "bar")
- self.assertEqual([doc], rows)
-
- def test_index_split_words_match_both(self):
- self.db.create_index("index", "split_words(name)")
- content = '{"name": "foo foo"}'
- doc = self.db.create_doc_from_json(content)
- rows = self.db.get_from_index("index", "foo")
- self.assertEqual([doc], rows)
-
- def test_index_split_words_double_space(self):
- self.db.create_index("index", "split_words(name)")
- content = '{"name": "foo bar"}'
- doc = self.db.create_doc_from_json(content)
- rows = self.db.get_from_index("index", "bar")
- self.assertEqual([doc], rows)
-
- def test_index_split_words_leading_space(self):
- self.db.create_index("index", "split_words(name)")
- content = '{"name": " foo bar"}'
- doc = self.db.create_doc_from_json(content)
- rows = self.db.get_from_index("index", "foo")
- self.assertEqual([doc], rows)
-
- def test_index_split_words_trailing_space(self):
- self.db.create_index("index", "split_words(name)")
- content = '{"name": "foo bar "}'
- doc = self.db.create_doc_from_json(content)
- rows = self.db.get_from_index("index", "bar")
- self.assertEqual([doc], rows)
-
- def test_get_from_index_with_number(self):
- self.db.create_index("index", "number(foo, 5)")
- content = '{"foo": 12}'
- doc = self.db.create_doc_from_json(content)
- rows = self.db.get_from_index("index", "00012")
- self.assertEqual([doc], rows)
-
- def test_get_from_index_with_number_bigger_than_padding(self):
- self.db.create_index("index", "number(foo, 5)")
- content = '{"foo": 123456}'
- doc = self.db.create_doc_from_json(content)
- rows = self.db.get_from_index("index", "123456")
- self.assertEqual([doc], rows)
-
- def test_number_mapping_ignores_non_numbers(self):
- self.db.create_index("index", "number(foo, 5)")
- content = '{"foo": 56}'
- doc1 = self.db.create_doc_from_json(content)
- content = '{"foo": "this is not a maigret painting"}'
- self.db.create_doc_from_json(content)
- rows = self.db.get_from_index("index", "*")
- self.assertEqual([doc1], rows)
-
- def test_get_from_index_with_bool(self):
- self.db.create_index("index", "bool(foo)")
- content = '{"foo": true}'
- doc = self.db.create_doc_from_json(content)
- rows = self.db.get_from_index("index", "1")
- self.assertEqual([doc], rows)
-
- def test_get_from_index_with_bool_false(self):
- self.db.create_index("index", "bool(foo)")
- content = '{"foo": false}'
- doc = self.db.create_doc_from_json(content)
- rows = self.db.get_from_index("index", "0")
- self.assertEqual([doc], rows)
-
- def test_get_from_index_with_non_bool(self):
- self.db.create_index("index", "bool(foo)")
- content = '{"foo": 42}'
- self.db.create_doc_from_json(content)
- rows = self.db.get_from_index("index", "*")
- self.assertEqual([], rows)
-
- def test_get_from_index_with_combine(self):
- self.db.create_index("index", "combine(foo, bar)")
- content = '{"foo": "value1", "bar": "value2"}'
- doc = self.db.create_doc_from_json(content)
- rows = self.db.get_from_index("index", "value1")
- self.assertEqual([doc], rows)
- rows = self.db.get_from_index("index", "value2")
- self.assertEqual([doc], rows)
-
- def test_get_complex_combine(self):
- self.db.create_index(
- "index", "combine(number(foo, 5), lower(bar), split_words(baz))")
- content = '{"foo": 12, "bar": "ALLCAPS", "baz": "qux nox"}'
- doc = self.db.create_doc_from_json(content)
- content = '{"foo": "not a number", "bar": "something"}'
- doc2 = self.db.create_doc_from_json(content)
- rows = self.db.get_from_index("index", "00012")
- self.assertEqual([doc], rows)
- rows = self.db.get_from_index("index", "allcaps")
- self.assertEqual([doc], rows)
- rows = self.db.get_from_index("index", "nox")
- self.assertEqual([doc], rows)
- rows = self.db.get_from_index("index", "something")
- self.assertEqual([doc2], rows)
-
- def test_get_index_keys_from_index(self):
- self.db.create_index('test-idx', 'key')
- content1 = '{"key": "value1"}'
- content2 = '{"key": "value2"}'
- content3 = '{"key": "value2"}'
- self.db.create_doc_from_json(content1)
- self.db.create_doc_from_json(content2)
- self.db.create_doc_from_json(content3)
- self.assertEqual(
- [('value1',), ('value2',)],
- sorted(self.db.get_index_keys('test-idx')))
-
- def test_get_index_keys_from_multicolumn_index(self):
- self.db.create_index('test-idx', 'key1', 'key2')
- content1 = '{"key1": "value1", "key2": "val2-1"}'
- content2 = '{"key1": "value2", "key2": "val2-2"}'
- content3 = '{"key1": "value2", "key2": "val2-2"}'
- content4 = '{"key1": "value2", "key2": "val3"}'
- self.db.create_doc_from_json(content1)
- self.db.create_doc_from_json(content2)
- self.db.create_doc_from_json(content3)
- self.db.create_doc_from_json(content4)
- self.assertEqual([
- ('value1', 'val2-1'),
- ('value2', 'val2-2'),
- ('value2', 'val3')],
- sorted(self.db.get_index_keys('test-idx')))
-
- def test_empty_expr(self):
- self.assertParseError('')
-
- def test_nested_unknown_operation(self):
- self.assertParseError('unknown_operation(field1)')
-
- def test_parse_missing_close_paren(self):
- self.assertParseError("lower(a")
-
- def test_parse_trailing_close_paren(self):
- self.assertParseError("lower(ab))")
-
- def test_parse_trailing_chars(self):
- self.assertParseError("lower(ab)adsf")
-
- def test_parse_empty_op(self):
- self.assertParseError("(ab)")
-
- def test_parse_top_level_commas(self):
- self.assertParseError("a, b")
-
- def test_invalid_field_name(self):
- self.assertParseError("a.")
-
- def test_invalid_inner_field_name(self):
- self.assertParseError("lower(a.)")
-
- def test_gobbledigook(self):
- self.assertParseError("(@#@cc @#!*DFJSXV(()jccd")
-
- def test_leading_space(self):
- self.assertIndexCreatable(" lower(a)")
-
- def test_trailing_space(self):
- self.assertIndexCreatable("lower(a) ")
-
- def test_spaces_before_open_paren(self):
- self.assertIndexCreatable("lower (a)")
-
- def test_spaces_after_open_paren(self):
- self.assertIndexCreatable("lower( a)")
-
- def test_spaces_before_close_paren(self):
- self.assertIndexCreatable("lower(a )")
-
- def test_spaces_before_comma(self):
- self.assertIndexCreatable("combine(a , b , c)")
-
- def test_spaces_after_comma(self):
- self.assertIndexCreatable("combine(a, b, c)")
-
- def test_all_together_now(self):
- self.assertParseError(' (a) ')
-
- def test_all_together_now2(self):
- self.assertParseError('combine(lower(x)x,foo)')
-
-
-class PythonBackendTests(tests.DatabaseBaseTests):
-
- def setUp(self):
- super(PythonBackendTests, self).setUp()
- self.simple_doc = json.loads(simple_doc)
-
- def test_create_doc_with_factory(self):
- self.db.set_document_factory(TestAlternativeDocument)
- doc = self.db.create_doc(self.simple_doc, doc_id='my_doc_id')
- self.assertTrue(isinstance(doc, TestAlternativeDocument))
-
- def test_get_doc_after_put_with_factory(self):
- doc = self.db.create_doc(self.simple_doc, doc_id='my_doc_id')
- self.db.set_document_factory(TestAlternativeDocument)
- result = self.db.get_doc('my_doc_id')
- self.assertTrue(isinstance(result, TestAlternativeDocument))
- self.assertEqual(doc.doc_id, result.doc_id)
- self.assertEqual(doc.rev, result.rev)
- self.assertEqual(doc.get_json(), result.get_json())
- self.assertEqual(False, result.has_conflicts)
-
- def test_get_doc_nonexisting_with_factory(self):
- self.db.set_document_factory(TestAlternativeDocument)
- self.assertIs(None, self.db.get_doc('non-existing'))
-
- def test_get_all_docs_with_factory(self):
- self.db.set_document_factory(TestAlternativeDocument)
- self.db.create_doc(self.simple_doc)
- self.assertTrue(isinstance(
- list(self.db.get_all_docs()[1])[0], TestAlternativeDocument))
-
- def test_get_docs_conflicted_with_factory(self):
- self.db.set_document_factory(TestAlternativeDocument)
- doc1 = self.db.create_doc(self.simple_doc)
- doc2 = self.make_document(doc1.doc_id, 'alternate:1', nested_doc)
- self.db._put_doc_if_newer(
- doc2, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertTrue(
- isinstance(
- list(self.db.get_docs([doc1.doc_id]))[0],
- TestAlternativeDocument))
-
- def test_get_from_index_with_factory(self):
- self.db.set_document_factory(TestAlternativeDocument)
- self.db.create_doc(self.simple_doc)
- self.db.create_index('test-idx', 'key')
- self.assertTrue(
- isinstance(
- self.db.get_from_index('test-idx', 'value')[0],
- TestAlternativeDocument))
-
- def test_sync_exchange_updates_indexes(self):
- doc = self.db.create_doc(self.simple_doc)
- self.db.create_index('test-idx', 'key')
- new_content = '{"key": "altval"}'
- other_rev = 'test:1|z:2'
- st = self.db.get_sync_target()
-
- def ignore(doc_id, doc_rev, doc):
- pass
-
- doc_other = self.make_document(doc.doc_id, other_rev, new_content)
- docs_by_gen = [(doc_other, 10, 'T-sid')]
- st.sync_exchange(
- docs_by_gen, 'other-replica', last_known_generation=0,
- last_known_trans_id=None, return_doc_cb=ignore)
- self.assertGetDoc(self.db, doc.doc_id, other_rev, new_content, False)
- self.assertEqual(
- [doc_other], self.db.get_from_index('test-idx', 'altval'))
- self.assertEqual([], self.db.get_from_index('test-idx', 'value'))
-
-
-# Use a custom loader to apply the scenarios at load time.
-load_tests = tests.load_with_scenarios
diff --git a/src/leap/soledad/u1db/tests/test_c_backend.py b/src/leap/soledad/u1db/tests/test_c_backend.py
deleted file mode 100644
index bdd2aec7..00000000
--- a/src/leap/soledad/u1db/tests/test_c_backend.py
+++ /dev/null
@@ -1,634 +0,0 @@
-# Copyright 2011-2012 Canonical Ltd.
-#
-# This file is part of u1db.
-#
-# u1db is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License version 3
-# as published by the Free Software Foundation.
-#
-# u1db is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with u1db. If not, see <http://www.gnu.org/licenses/>.
-
-try:
- import simplejson as json
-except ImportError:
- import json # noqa
-from u1db import (
- Document,
- errors,
- tests,
- )
-from u1db.tests import c_backend_wrapper, c_backend_error
-from u1db.tests.test_remote_sync_target import (
- make_http_app,
- make_oauth_http_app
- )
-
-
-class TestCDatabaseExists(tests.TestCase):
-
- def test_c_backend_compiled(self):
- if c_backend_wrapper is None:
- self.fail("Could not import the c_backend_wrapper module."
- " Was it compiled properly?\n%s" % (c_backend_error,))
-
-
-# Rather than lots of failing tests, we have the above check to test that the
-# module exists, and all these tests just get skipped
-class BackendTests(tests.TestCase):
-
- def setUp(self):
- super(BackendTests, self).setUp()
- if c_backend_wrapper is None:
- self.skipTest("The c_backend_wrapper could not be imported")
-
-
-class TestCDatabase(BackendTests):
-
- def test_exists(self):
- if c_backend_wrapper is None:
- self.fail("Could not import the c_backend_wrapper module."
- " Was it compiled properly?")
- db = c_backend_wrapper.CDatabase(':memory:')
- self.assertEqual(':memory:', db._filename)
-
- def test__is_closed(self):
- db = c_backend_wrapper.CDatabase(':memory:')
- self.assertTrue(db._sql_is_open())
- db.close()
- self.assertFalse(db._sql_is_open())
-
- def test__run_sql(self):
- db = c_backend_wrapper.CDatabase(':memory:')
- self.assertTrue(db._sql_is_open())
- self.assertEqual([], db._run_sql('CREATE TABLE test (id INTEGER)'))
- self.assertEqual([], db._run_sql('INSERT INTO test VALUES (1)'))
- self.assertEqual([('1',)], db._run_sql('SELECT * FROM test'))
-
- def test__get_generation(self):
- db = c_backend_wrapper.CDatabase(':memory:')
- self.assertEqual(0, db._get_generation())
- db.create_doc_from_json(tests.simple_doc)
- self.assertEqual(1, db._get_generation())
-
- def test__get_generation_info(self):
- db = c_backend_wrapper.CDatabase(':memory:')
- self.assertEqual((0, ''), db._get_generation_info())
- db.create_doc_from_json(tests.simple_doc)
- info = db._get_generation_info()
- self.assertEqual(1, info[0])
- self.assertTrue(info[1].startswith('T-'))
-
- def test__set_replica_uid(self):
- db = c_backend_wrapper.CDatabase(':memory:')
- self.assertIsNot(None, db._replica_uid)
- db._set_replica_uid('foo')
- self.assertEqual([('foo',)], db._run_sql(
- "SELECT value FROM u1db_config WHERE name='replica_uid'"))
-
- def test_default_replica_uid(self):
- self.db = c_backend_wrapper.CDatabase(':memory:')
- self.assertIsNot(None, self.db._replica_uid)
- self.assertEqual(32, len(self.db._replica_uid))
- # casting to an int from the uid *is* the check for correct behavior.
- int(self.db._replica_uid, 16)
-
- def test_get_conflicts_with_borked_data(self):
- self.db = c_backend_wrapper.CDatabase(':memory:')
- # We add an entry to conflicts, but not to documents, which is an
- # invalid situation
- self.db._run_sql("INSERT INTO conflicts"
- " VALUES ('doc-id', 'doc-rev', '{}')")
- self.assertRaises(Exception, self.db.get_doc_conflicts, 'doc-id')
-
- def test_create_index_list(self):
- # We manually poke data into the DB, so that we test just the "get_doc"
- # code, rather than also testing the index management code.
- self.db = c_backend_wrapper.CDatabase(':memory:')
- doc = self.db.create_doc_from_json(tests.simple_doc)
- self.db.create_index_list("key-idx", ["key"])
- docs = self.db.get_from_index('key-idx', 'value')
- self.assertEqual([doc], docs)
-
- def test_create_index_list_on_non_ascii_field_name(self):
- self.db = c_backend_wrapper.CDatabase(':memory:')
- doc = self.db.create_doc_from_json(json.dumps({u'\xe5': 'value'}))
- self.db.create_index_list('test-idx', [u'\xe5'])
- self.assertEqual([doc], self.db.get_from_index('test-idx', 'value'))
-
- def test_list_indexes_with_non_ascii_field_names(self):
- self.db = c_backend_wrapper.CDatabase(':memory:')
- self.db.create_index_list('test-idx', [u'\xe5'])
- self.assertEqual(
- [('test-idx', [u'\xe5'])], self.db.list_indexes())
-
- def test_create_index_evaluates_it(self):
- self.db = c_backend_wrapper.CDatabase(':memory:')
- doc = self.db.create_doc_from_json(tests.simple_doc)
- self.db.create_index_list('test-idx', ['key'])
- self.assertEqual([doc], self.db.get_from_index('test-idx', 'value'))
-
- def test_wildcard_matches_unicode_value(self):
- self.db = c_backend_wrapper.CDatabase(':memory:')
- doc = self.db.create_doc_from_json(json.dumps({"key": u"valu\xe5"}))
- self.db.create_index_list('test-idx', ['key'])
- self.assertEqual([doc], self.db.get_from_index('test-idx', '*'))
-
- def test_create_index_fails_if_name_taken(self):
- self.db = c_backend_wrapper.CDatabase(':memory:')
- self.db.create_index_list('test-idx', ['key'])
- self.assertRaises(errors.IndexNameTakenError,
- self.db.create_index_list,
- 'test-idx', ['stuff'])
-
- def test_create_index_does_not_fail_if_name_taken_with_same_index(self):
- self.db = c_backend_wrapper.CDatabase(':memory:')
- self.db.create_index_list('test-idx', ['key'])
- self.db.create_index_list('test-idx', ['key'])
- self.assertEqual([('test-idx', ['key'])], self.db.list_indexes())
-
- def test_create_index_after_deleting_document(self):
- self.db = c_backend_wrapper.CDatabase(':memory:')
- doc = self.db.create_doc_from_json(tests.simple_doc)
- doc2 = self.db.create_doc_from_json(tests.simple_doc)
- self.db.delete_doc(doc2)
- self.db.create_index_list('test-idx', ['key'])
- self.assertEqual([doc], self.db.get_from_index('test-idx', 'value'))
-
- def test_get_from_index(self):
- # We manually poke data into the DB, so that we test just the "get_doc"
- # code, rather than also testing the index management code.
- self.db = c_backend_wrapper.CDatabase(':memory:')
- doc = self.db.create_doc_from_json(tests.simple_doc)
- self.db.create_index("key-idx", "key")
- docs = self.db.get_from_index('key-idx', 'value')
- self.assertEqual([doc], docs)
-
- def test_get_from_index_list(self):
- # We manually poke data into the DB, so that we test just the "get_doc"
- # code, rather than also testing the index management code.
- self.db = c_backend_wrapper.CDatabase(':memory:')
- doc = self.db.create_doc_from_json(tests.simple_doc)
- self.db.create_index("key-idx", "key")
- docs = self.db.get_from_index_list('key-idx', ['value'])
- self.assertEqual([doc], docs)
-
- def test_get_from_index_list_multi(self):
- self.db = c_backend_wrapper.CDatabase(':memory:')
- content = '{"key": "value", "key2": "value2"}'
- doc = self.db.create_doc_from_json(content)
- self.db.create_index('test-idx', 'key', 'key2')
- self.assertEqual(
- [doc],
- self.db.get_from_index_list('test-idx', ['value', 'value2']))
-
- def test_get_from_index_list_multi_ordered(self):
- self.db = c_backend_wrapper.CDatabase(':memory:')
- doc1 = self.db.create_doc_from_json(
- '{"key": "value3", "key2": "value4"}')
- doc2 = self.db.create_doc_from_json(
- '{"key": "value2", "key2": "value3"}')
- doc3 = self.db.create_doc_from_json(
- '{"key": "value2", "key2": "value2"}')
- doc4 = self.db.create_doc_from_json(
- '{"key": "value1", "key2": "value1"}')
- self.db.create_index('test-idx', 'key', 'key2')
- self.assertEqual(
- [doc4, doc3, doc2, doc1],
- self.db.get_from_index_list('test-idx', ['v*', '*']))
-
- def test_get_from_index_2(self):
- self.db = c_backend_wrapper.CDatabase(':memory:')
- doc = self.db.create_doc_from_json(tests.nested_doc)
- self.db.create_index("multi-idx", "key", "sub.doc")
- docs = self.db.get_from_index('multi-idx', 'value', 'underneath')
- self.assertEqual([doc], docs)
-
- def test_get_index_keys(self):
- self.db = c_backend_wrapper.CDatabase(':memory:')
- self.db.create_doc_from_json(tests.simple_doc)
- self.db.create_index("key-idx", "key")
- keys = self.db.get_index_keys('key-idx')
- self.assertEqual([("value",)], keys)
-
- def test__query_init_one_field(self):
- self.db = c_backend_wrapper.CDatabase(':memory:')
- self.db.create_index("key-idx", "key")
- query = self.db._query_init("key-idx")
- self.assertEqual("key-idx", query.index_name)
- self.assertEqual(1, query.num_fields)
- self.assertEqual(["key"], query.fields)
-
- def test__query_init_two_fields(self):
- self.db = c_backend_wrapper.CDatabase(':memory:')
- self.db.create_index("two-idx", "key", "key2")
- query = self.db._query_init("two-idx")
- self.assertEqual("two-idx", query.index_name)
- self.assertEqual(2, query.num_fields)
- self.assertEqual(["key", "key2"], query.fields)
-
- def assertFormatQueryEquals(self, expected, wildcards, fields):
- val, w = c_backend_wrapper._format_query(fields)
- self.assertEqual(expected, val)
- self.assertEqual(wildcards, w)
-
- def test__format_query(self):
- self.assertFormatQueryEquals(
- "SELECT d0.doc_id FROM document_fields d0"
- " WHERE d0.field_name = ? AND d0.value = ? ORDER BY d0.value",
- [0], ["1"])
- self.assertFormatQueryEquals(
- "SELECT d0.doc_id"
- " FROM document_fields d0, document_fields d1"
- " WHERE d0.field_name = ? AND d0.value = ?"
- " AND d0.doc_id = d1.doc_id"
- " AND d1.field_name = ? AND d1.value = ?"
- " ORDER BY d0.value, d1.value",
- [0, 0], ["1", "2"])
- self.assertFormatQueryEquals(
- "SELECT d0.doc_id"
- " FROM document_fields d0, document_fields d1, document_fields d2"
- " WHERE d0.field_name = ? AND d0.value = ?"
- " AND d0.doc_id = d1.doc_id"
- " AND d1.field_name = ? AND d1.value = ?"
- " AND d0.doc_id = d2.doc_id"
- " AND d2.field_name = ? AND d2.value = ?"
- " ORDER BY d0.value, d1.value, d2.value",
- [0, 0, 0], ["1", "2", "3"])
-
- def test__format_query_wildcard(self):
- self.assertFormatQueryEquals(
- "SELECT d0.doc_id FROM document_fields d0"
- " WHERE d0.field_name = ? AND d0.value NOT NULL ORDER BY d0.value",
- [1], ["*"])
- self.assertFormatQueryEquals(
- "SELECT d0.doc_id"
- " FROM document_fields d0, document_fields d1"
- " WHERE d0.field_name = ? AND d0.value = ?"
- " AND d0.doc_id = d1.doc_id"
- " AND d1.field_name = ? AND d1.value NOT NULL"
- " ORDER BY d0.value, d1.value",
- [0, 1], ["1", "*"])
-
- def test__format_query_glob(self):
- self.assertFormatQueryEquals(
- "SELECT d0.doc_id FROM document_fields d0"
- " WHERE d0.field_name = ? AND d0.value GLOB ? ORDER BY d0.value",
- [2], ["1*"])
-
-
-class TestCSyncTarget(BackendTests):
-
- def setUp(self):
- super(TestCSyncTarget, self).setUp()
- self.db = c_backend_wrapper.CDatabase(':memory:')
- self.st = self.db.get_sync_target()
-
- def test_attached_to_db(self):
- self.assertEqual(
- self.db._replica_uid, self.st.get_sync_info("misc")[0])
-
- def test_get_sync_exchange(self):
- exc = self.st._get_sync_exchange("source-uid", 10)
- self.assertIsNot(None, exc)
-
- def test_sync_exchange_insert_doc_from_source(self):
- exc = self.st._get_sync_exchange("source-uid", 5)
- doc = c_backend_wrapper.make_document('doc-id', 'replica:1',
- tests.simple_doc)
- self.assertEqual([], exc.get_seen_ids())
- exc.insert_doc_from_source(doc, 10, 'T-sid')
- self.assertGetDoc(self.db, 'doc-id', 'replica:1', tests.simple_doc,
- False)
- self.assertEqual(
- (10, 'T-sid'), self.db._get_replica_gen_and_trans_id('source-uid'))
- self.assertEqual(['doc-id'], exc.get_seen_ids())
-
- def test_sync_exchange_conflicted_doc(self):
- doc = self.db.create_doc_from_json(tests.simple_doc)
- exc = self.st._get_sync_exchange("source-uid", 5)
- doc2 = c_backend_wrapper.make_document(doc.doc_id, 'replica:1',
- tests.nested_doc)
- self.assertEqual([], exc.get_seen_ids())
- # The insert should be rejected and the doc_id not considered 'seen'
- exc.insert_doc_from_source(doc2, 10, 'T-sid')
- self.assertGetDoc(
- self.db, doc.doc_id, doc.rev, tests.simple_doc, False)
- self.assertEqual([], exc.get_seen_ids())
-
- def test_sync_exchange_find_doc_ids(self):
- doc = self.db.create_doc_from_json(tests.simple_doc)
- exc = self.st._get_sync_exchange("source-uid", 0)
- self.assertEqual(0, exc.target_gen)
- exc.find_doc_ids_to_return()
- doc_id = exc.get_doc_ids_to_return()[0]
- self.assertEqual(
- (doc.doc_id, 1), doc_id[:-1])
- self.assertTrue(doc_id[-1].startswith('T-'))
- self.assertEqual(1, exc.target_gen)
-
- def test_sync_exchange_find_doc_ids_not_including_recently_inserted(self):
- doc1 = self.db.create_doc_from_json(tests.simple_doc)
- doc2 = self.db.create_doc_from_json(tests.nested_doc)
- exc = self.st._get_sync_exchange("source-uid", 0)
- doc3 = c_backend_wrapper.make_document(doc1.doc_id,
- doc1.rev + "|zreplica:2", tests.simple_doc)
- exc.insert_doc_from_source(doc3, 10, 'T-sid')
- exc.find_doc_ids_to_return()
- self.assertEqual(
- (doc2.doc_id, 2), exc.get_doc_ids_to_return()[0][:-1])
- self.assertEqual(3, exc.target_gen)
-
- def test_sync_exchange_return_docs(self):
- returned = []
-
- def return_doc_cb(doc, gen, trans_id):
- returned.append((doc, gen, trans_id))
-
- doc1 = self.db.create_doc_from_json(tests.simple_doc)
- exc = self.st._get_sync_exchange("source-uid", 0)
- exc.find_doc_ids_to_return()
- exc.return_docs(return_doc_cb)
- self.assertEqual((doc1, 1), returned[0][:-1])
-
- def test_sync_exchange_doc_ids(self):
- doc1 = self.db.create_doc_from_json(tests.simple_doc, doc_id='doc-1')
- db2 = c_backend_wrapper.CDatabase(':memory:')
- doc2 = db2.create_doc_from_json(tests.nested_doc, doc_id='doc-2')
- returned = []
-
- def return_doc_cb(doc, gen, trans_id):
- returned.append((doc, gen, trans_id))
-
- val = self.st.sync_exchange_doc_ids(
- db2, [(doc2.doc_id, 1, 'T-sid')], 0, None, return_doc_cb)
- last_trans_id = self.db._get_transaction_log()[-1][1]
- self.assertEqual(2, self.db._get_generation())
- self.assertEqual((2, last_trans_id), val)
- self.assertGetDoc(self.db, doc2.doc_id, doc2.rev, tests.nested_doc,
- False)
- self.assertEqual((doc1, 1), returned[0][:-1])
-
-
-class TestCHTTPSyncTarget(BackendTests):
-
- def test_format_sync_url(self):
- target = c_backend_wrapper.create_http_sync_target("http://base_url")
- self.assertEqual("http://base_url/sync-from/replica-uid",
- c_backend_wrapper._format_sync_url(target, "replica-uid"))
-
- def test_format_sync_url_escapes(self):
- # The base_url should not get munged (we assume it is already a
- # properly formed URL), but the replica-uid should get properly escaped
- target = c_backend_wrapper.create_http_sync_target(
- "http://host/base%2Ctest/")
- self.assertEqual("http://host/base%2Ctest/sync-from/replica%2Cuid",
- c_backend_wrapper._format_sync_url(target, "replica,uid"))
-
- def test_format_refuses_non_http(self):
- db = c_backend_wrapper.CDatabase(':memory:')
- target = db.get_sync_target()
- self.assertRaises(RuntimeError,
- c_backend_wrapper._format_sync_url, target, 'replica,uid')
-
- def test_oauth_credentials(self):
- target = c_backend_wrapper.create_oauth_http_sync_target(
- "http://host/base%2Ctest/",
- 'consumer-key', 'consumer-secret', 'token-key', 'token-secret')
- auth = c_backend_wrapper._get_oauth_authorization(target,
- "GET", "http://host/base%2Ctest/sync-from/abcd-efg")
- self.assertIsNot(None, auth)
- self.assertTrue(auth.startswith('Authorization: OAuth realm="", '))
- self.assertNotIn('http://host/base', auth)
- self.assertIn('oauth_nonce="', auth)
- self.assertIn('oauth_timestamp="', auth)
- self.assertIn('oauth_consumer_key="consumer-key"', auth)
- self.assertIn('oauth_signature_method="HMAC-SHA1"', auth)
- self.assertIn('oauth_version="1.0"', auth)
- self.assertIn('oauth_token="token-key"', auth)
- self.assertIn('oauth_signature="', auth)
-
-
-class TestSyncCtoHTTPViaC(tests.TestCaseWithServer):
-
- make_app_with_state = staticmethod(make_http_app)
-
- def setUp(self):
- super(TestSyncCtoHTTPViaC, self).setUp()
- if c_backend_wrapper is None:
- self.skipTest("The c_backend_wrapper could not be imported")
- self.startServer()
-
- def test_trivial_sync(self):
- mem_db = self.request_state._create_database('test.db')
- mem_doc = mem_db.create_doc_from_json(tests.nested_doc)
- url = self.getURL('test.db')
- target = c_backend_wrapper.create_http_sync_target(url)
- db = c_backend_wrapper.CDatabase(':memory:')
- doc = db.create_doc_from_json(tests.simple_doc)
- c_backend_wrapper.sync_db_to_target(db, target)
- self.assertGetDoc(mem_db, doc.doc_id, doc.rev, doc.get_json(), False)
- self.assertGetDoc(db, mem_doc.doc_id, mem_doc.rev, mem_doc.get_json(),
- False)
-
- def test_unavailable(self):
- mem_db = self.request_state._create_database('test.db')
- mem_db.create_doc_from_json(tests.nested_doc)
- tries = []
-
- def wrapper(instance, *args, **kwargs):
- tries.append(None)
- raise errors.Unavailable
-
- mem_db.whats_changed = wrapper
- url = self.getURL('test.db')
- target = c_backend_wrapper.create_http_sync_target(url)
- db = c_backend_wrapper.CDatabase(':memory:')
- db.create_doc_from_json(tests.simple_doc)
- self.assertRaises(
- errors.Unavailable, c_backend_wrapper.sync_db_to_target, db,
- target)
- self.assertEqual(5, len(tries))
-
- def test_unavailable_then_available(self):
- mem_db = self.request_state._create_database('test.db')
- mem_doc = mem_db.create_doc_from_json(tests.nested_doc)
- orig_whatschanged = mem_db.whats_changed
- tries = []
-
- def wrapper(instance, *args, **kwargs):
- if len(tries) < 1:
- tries.append(None)
- raise errors.Unavailable
- return orig_whatschanged(instance, *args, **kwargs)
-
- mem_db.whats_changed = wrapper
- url = self.getURL('test.db')
- target = c_backend_wrapper.create_http_sync_target(url)
- db = c_backend_wrapper.CDatabase(':memory:')
- doc = db.create_doc_from_json(tests.simple_doc)
- c_backend_wrapper.sync_db_to_target(db, target)
- self.assertEqual(1, len(tries))
- self.assertGetDoc(mem_db, doc.doc_id, doc.rev, doc.get_json(), False)
- self.assertGetDoc(db, mem_doc.doc_id, mem_doc.rev, mem_doc.get_json(),
- False)
-
- def test_db_sync(self):
- mem_db = self.request_state._create_database('test.db')
- mem_doc = mem_db.create_doc_from_json(tests.nested_doc)
- url = self.getURL('test.db')
- db = c_backend_wrapper.CDatabase(':memory:')
- doc = db.create_doc_from_json(tests.simple_doc)
- local_gen_before_sync = db.sync(url)
- gen, _, changes = db.whats_changed(local_gen_before_sync)
- self.assertEqual(1, len(changes))
- self.assertEqual(mem_doc.doc_id, changes[0][0])
- self.assertEqual(1, gen - local_gen_before_sync)
- self.assertEqual(1, local_gen_before_sync)
- self.assertGetDoc(mem_db, doc.doc_id, doc.rev, doc.get_json(), False)
- self.assertGetDoc(db, mem_doc.doc_id, mem_doc.rev, mem_doc.get_json(),
- False)
-
-
-class TestSyncCtoOAuthHTTPViaC(tests.TestCaseWithServer):
-
- make_app_with_state = staticmethod(make_oauth_http_app)
-
- def setUp(self):
- super(TestSyncCtoOAuthHTTPViaC, self).setUp()
- if c_backend_wrapper is None:
- self.skipTest("The c_backend_wrapper could not be imported")
- self.startServer()
-
- def test_trivial_sync(self):
- mem_db = self.request_state._create_database('test.db')
- mem_doc = mem_db.create_doc_from_json(tests.nested_doc)
- url = self.getURL('~/test.db')
- target = c_backend_wrapper.create_oauth_http_sync_target(url,
- tests.consumer1.key, tests.consumer1.secret,
- tests.token1.key, tests.token1.secret)
- db = c_backend_wrapper.CDatabase(':memory:')
- doc = db.create_doc_from_json(tests.simple_doc)
- c_backend_wrapper.sync_db_to_target(db, target)
- self.assertGetDoc(mem_db, doc.doc_id, doc.rev, doc.get_json(), False)
- self.assertGetDoc(db, mem_doc.doc_id, mem_doc.rev, mem_doc.get_json(),
- False)
-
-
-class TestVectorClock(BackendTests):
-
- def create_vcr(self, rev):
- return c_backend_wrapper.VectorClockRev(rev)
-
- def test_parse_empty(self):
- self.assertEqual('VectorClockRev()',
- repr(self.create_vcr('')))
-
- def test_parse_invalid(self):
- self.assertEqual('VectorClockRev(None)',
- repr(self.create_vcr('x')))
- self.assertEqual('VectorClockRev(None)',
- repr(self.create_vcr('x:a')))
- self.assertEqual('VectorClockRev(None)',
- repr(self.create_vcr('y:1|x:a')))
- self.assertEqual('VectorClockRev(None)',
- repr(self.create_vcr('x:a|y:1')))
- self.assertEqual('VectorClockRev(None)',
- repr(self.create_vcr('y:1|x:2a')))
- self.assertEqual('VectorClockRev(None)',
- repr(self.create_vcr('y:1||')))
- self.assertEqual('VectorClockRev(None)',
- repr(self.create_vcr('y:1|')))
- self.assertEqual('VectorClockRev(None)',
- repr(self.create_vcr('y:1|x:2|')))
- self.assertEqual('VectorClockRev(None)',
- repr(self.create_vcr('y:1|x:2|:')))
- self.assertEqual('VectorClockRev(None)',
- repr(self.create_vcr('y:1|x:2|m:')))
- self.assertEqual('VectorClockRev(None)',
- repr(self.create_vcr('y:1|x:|m:3')))
- self.assertEqual('VectorClockRev(None)',
- repr(self.create_vcr('y:1|:|m:3')))
-
- def test_parse_single(self):
- self.assertEqual('VectorClockRev(test:1)',
- repr(self.create_vcr('test:1')))
-
- def test_parse_multi(self):
- self.assertEqual('VectorClockRev(test:1|z:2)',
- repr(self.create_vcr('test:1|z:2')))
- self.assertEqual('VectorClockRev(ab:1|bc:2|cd:3|de:4|ef:5)',
- repr(self.create_vcr('ab:1|bc:2|cd:3|de:4|ef:5')))
- self.assertEqual('VectorClockRev(a:2|b:1)',
- repr(self.create_vcr('b:1|a:2')))
-
-
-class TestCDocument(BackendTests):
-
- def make_document(self, *args, **kwargs):
- return c_backend_wrapper.make_document(*args, **kwargs)
-
- def test_create(self):
- self.make_document('doc-id', 'uid:1', tests.simple_doc)
-
- def assertPyDocEqualCDoc(self, *args, **kwargs):
- cdoc = self.make_document(*args, **kwargs)
- pydoc = Document(*args, **kwargs)
- self.assertEqual(pydoc, cdoc)
- self.assertEqual(cdoc, pydoc)
-
- def test_cmp_to_pydoc_equal(self):
- self.assertPyDocEqualCDoc('doc-id', 'uid:1', tests.simple_doc)
- self.assertPyDocEqualCDoc('doc-id', 'uid:1', tests.simple_doc,
- has_conflicts=False)
- self.assertPyDocEqualCDoc('doc-id', 'uid:1', tests.simple_doc,
- has_conflicts=True)
-
- def test_cmp_to_pydoc_not_equal_conflicts(self):
- cdoc = self.make_document('doc-id', 'uid:1', tests.simple_doc)
- pydoc = Document('doc-id', 'uid:1', tests.simple_doc,
- has_conflicts=True)
- self.assertNotEqual(cdoc, pydoc)
- self.assertNotEqual(pydoc, cdoc)
-
- def test_cmp_to_pydoc_not_equal_doc_id(self):
- cdoc = self.make_document('doc-id', 'uid:1', tests.simple_doc)
- pydoc = Document('doc2-id', 'uid:1', tests.simple_doc)
- self.assertNotEqual(cdoc, pydoc)
- self.assertNotEqual(pydoc, cdoc)
-
- def test_cmp_to_pydoc_not_equal_doc_rev(self):
- cdoc = self.make_document('doc-id', 'uid:1', tests.simple_doc)
- pydoc = Document('doc-id', 'uid:2', tests.simple_doc)
- self.assertNotEqual(cdoc, pydoc)
- self.assertNotEqual(pydoc, cdoc)
-
- def test_cmp_to_pydoc_not_equal_content(self):
- cdoc = self.make_document('doc-id', 'uid:1', tests.simple_doc)
- pydoc = Document('doc-id', 'uid:1', tests.nested_doc)
- self.assertNotEqual(cdoc, pydoc)
- self.assertNotEqual(pydoc, cdoc)
-
-
-class TestUUID(BackendTests):
-
- def test_uuid4_conformance(self):
- uuids = set()
- for i in range(20):
- uuid = c_backend_wrapper.generate_hex_uuid()
- self.assertIsInstance(uuid, str)
- self.assertEqual(32, len(uuid))
- # This will raise ValueError if it isn't a valid hex string
- long(uuid, 16)
- # Version 4 uuids have 2 other requirements, the high 4 bits of the
- # seventh byte are always '0x4', and the middle bits of byte 9 are
- # always set
- self.assertEqual('4', uuid[12])
- self.assertTrue(uuid[16] in '89ab')
- self.assertTrue(uuid not in uuids)
- uuids.add(uuid)
diff --git a/src/leap/soledad/u1db/tests/test_common_backend.py b/src/leap/soledad/u1db/tests/test_common_backend.py
deleted file mode 100644
index 8c7c7ed9..00000000
--- a/src/leap/soledad/u1db/tests/test_common_backend.py
+++ /dev/null
@@ -1,33 +0,0 @@
-# Copyright 2011 Canonical Ltd.
-#
-# This file is part of u1db.
-#
-# u1db is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License version 3
-# as published by the Free Software Foundation.
-#
-# u1db is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with u1db. If not, see <http://www.gnu.org/licenses/>.
-
-"""Test common backend bits."""
-
-from u1db import (
- backends,
- tests,
- )
-
-
-class TestCommonBackendImpl(tests.TestCase):
-
- def test__allocate_doc_id(self):
- db = backends.CommonBackend()
- doc_id1 = db._allocate_doc_id()
- self.assertTrue(doc_id1.startswith('D-'))
- self.assertEqual(34, len(doc_id1))
- int(doc_id1[len('D-'):], 16)
- self.assertNotEqual(doc_id1, db._allocate_doc_id())
diff --git a/src/leap/soledad/u1db/tests/test_document.py b/src/leap/soledad/u1db/tests/test_document.py
deleted file mode 100644
index 20f254b9..00000000
--- a/src/leap/soledad/u1db/tests/test_document.py
+++ /dev/null
@@ -1,148 +0,0 @@
-# Copyright 2011 Canonical Ltd.
-#
-# This file is part of u1db.
-#
-# u1db is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License version 3
-# as published by the Free Software Foundation.
-#
-# u1db is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with u1db. If not, see <http://www.gnu.org/licenses/>.
-
-
-from u1db import errors, tests
-
-
-class TestDocument(tests.TestCase):
-
- scenarios = ([(
- 'py', {'make_document_for_test': tests.make_document_for_test})] +
- tests.C_DATABASE_SCENARIOS)
-
- def test_create_doc(self):
- doc = self.make_document('doc-id', 'uid:1', tests.simple_doc)
- self.assertEqual('doc-id', doc.doc_id)
- self.assertEqual('uid:1', doc.rev)
- self.assertEqual(tests.simple_doc, doc.get_json())
- self.assertFalse(doc.has_conflicts)
-
- def test__repr__(self):
- doc = self.make_document('doc-id', 'uid:1', tests.simple_doc)
- self.assertEqual(
- '%s(doc-id, uid:1, \'{"key": "value"}\')'
- % (doc.__class__.__name__,),
- repr(doc))
-
- def test__repr__conflicted(self):
- doc = self.make_document('doc-id', 'uid:1', tests.simple_doc,
- has_conflicts=True)
- self.assertEqual(
- '%s(doc-id, uid:1, conflicted, \'{"key": "value"}\')'
- % (doc.__class__.__name__,),
- repr(doc))
-
- def test__lt__(self):
- doc_a = self.make_document('a', 'b', '{}')
- doc_b = self.make_document('b', 'b', '{}')
- self.assertTrue(doc_a < doc_b)
- self.assertTrue(doc_b > doc_a)
- doc_aa = self.make_document('a', 'a', '{}')
- self.assertTrue(doc_aa < doc_a)
-
- def test__eq__(self):
- doc_a = self.make_document('a', 'b', '{}')
- doc_b = self.make_document('a', 'b', '{}')
- self.assertTrue(doc_a == doc_b)
- doc_b = self.make_document('a', 'b', '{}', has_conflicts=True)
- self.assertFalse(doc_a == doc_b)
-
- def test_non_json_dict(self):
- self.assertRaises(
- errors.InvalidJSON, self.make_document, 'id', 'uid:1',
- '"not a json dictionary"')
-
- def test_non_json(self):
- self.assertRaises(
- errors.InvalidJSON, self.make_document, 'id', 'uid:1',
- 'not a json dictionary')
-
- def test_get_size(self):
- doc_a = self.make_document('a', 'b', '{"some": "content"}')
- self.assertEqual(
- len('a' + 'b' + '{"some": "content"}'), doc_a.get_size())
-
- def test_get_size_empty_document(self):
- doc_a = self.make_document('a', 'b', None)
- self.assertEqual(len('a' + 'b'), doc_a.get_size())
-
-
-class TestPyDocument(tests.TestCase):
-
- scenarios = ([(
- 'py', {'make_document_for_test': tests.make_document_for_test})])
-
- def test_get_content(self):
- doc = self.make_document('id', 'rev', '{"content":""}')
- self.assertEqual({"content": ""}, doc.content)
- doc.set_json('{"content": "new"}')
- self.assertEqual({"content": "new"}, doc.content)
-
- def test_set_content(self):
- doc = self.make_document('id', 'rev', '{"content":""}')
- doc.content = {"content": "new"}
- self.assertEqual('{"content": "new"}', doc.get_json())
-
- def test_set_bad_content(self):
- doc = self.make_document('id', 'rev', '{"content":""}')
- self.assertRaises(
- errors.InvalidContent, setattr, doc, 'content',
- '{"content": "new"}')
-
- def test_is_tombstone(self):
- doc_a = self.make_document('a', 'b', '{}')
- self.assertFalse(doc_a.is_tombstone())
- doc_a.set_json(None)
- self.assertTrue(doc_a.is_tombstone())
-
- def test_make_tombstone(self):
- doc_a = self.make_document('a', 'b', '{}')
- self.assertFalse(doc_a.is_tombstone())
- doc_a.make_tombstone()
- self.assertTrue(doc_a.is_tombstone())
-
- def test_same_content_as(self):
- doc_a = self.make_document('a', 'b', '{}')
- doc_b = self.make_document('d', 'e', '{}')
- self.assertTrue(doc_a.same_content_as(doc_b))
- doc_b = self.make_document('p', 'q', '{}', has_conflicts=True)
- self.assertTrue(doc_a.same_content_as(doc_b))
- doc_b.content['key'] = 'value'
- self.assertFalse(doc_a.same_content_as(doc_b))
-
- def test_same_content_as_json_order(self):
- doc_a = self.make_document(
- 'a', 'b', '{"key1": "val1", "key2": "val2"}')
- doc_b = self.make_document(
- 'c', 'd', '{"key2": "val2", "key1": "val1"}')
- self.assertTrue(doc_a.same_content_as(doc_b))
-
- def test_set_json(self):
- doc = self.make_document('id', 'rev', '{"content":""}')
- doc.set_json('{"content": "new"}')
- self.assertEqual('{"content": "new"}', doc.get_json())
-
- def test_set_json_non_dict(self):
- doc = self.make_document('id', 'rev', '{"content":""}')
- self.assertRaises(errors.InvalidJSON, doc.set_json, '"is not a dict"')
-
- def test_set_json_error(self):
- doc = self.make_document('id', 'rev', '{"content":""}')
- self.assertRaises(errors.InvalidJSON, doc.set_json, 'is not json')
-
-
-load_tests = tests.load_with_scenarios
diff --git a/src/leap/soledad/u1db/tests/test_errors.py b/src/leap/soledad/u1db/tests/test_errors.py
deleted file mode 100644
index 0e089ede..00000000
--- a/src/leap/soledad/u1db/tests/test_errors.py
+++ /dev/null
@@ -1,61 +0,0 @@
-# Copyright 2011 Canonical Ltd.
-#
-# This file is part of u1db.
-#
-# u1db is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License version 3
-# as published by the Free Software Foundation.
-#
-# u1db is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with u1db. If not, see <http://www.gnu.org/licenses/>.
-
-"""Tests error infrastructure."""
-
-from u1db import (
- errors,
- tests,
- )
-
-
-class TestError(tests.TestCase):
-
- def test_error_base(self):
- err = errors.U1DBError()
- self.assertEqual("error", err.wire_description)
- self.assertIs(None, err.message)
-
- err = errors.U1DBError("Message.")
- self.assertEqual("error", err.wire_description)
- self.assertEqual("Message.", err.message)
-
- def test_HTTPError(self):
- err = errors.HTTPError(500)
- self.assertEqual(500, err.status)
- self.assertIs(None, err.wire_description)
- self.assertIs(None, err.message)
-
- err = errors.HTTPError(500, "Crash.")
- self.assertEqual(500, err.status)
- self.assertIs(None, err.wire_description)
- self.assertEqual("Crash.", err.message)
-
- def test_HTTPError_str(self):
- err = errors.HTTPError(500)
- self.assertEqual("HTTPError(500)", str(err))
-
- err = errors.HTTPError(500, "ERROR")
- self.assertEqual("HTTPError(500, 'ERROR')", str(err))
-
- def test_Unvailable(self):
- err = errors.Unavailable()
- self.assertEqual(503, err.status)
- self.assertEqual("Unavailable()", str(err))
-
- err = errors.Unavailable("DOWN")
- self.assertEqual("DOWN", err.message)
- self.assertEqual("Unavailable('DOWN')", str(err))
diff --git a/src/leap/soledad/u1db/tests/test_http_app.py b/src/leap/soledad/u1db/tests/test_http_app.py
deleted file mode 100644
index 13522693..00000000
--- a/src/leap/soledad/u1db/tests/test_http_app.py
+++ /dev/null
@@ -1,1133 +0,0 @@
-# Copyright 2011-2012 Canonical Ltd.
-#
-# This file is part of u1db.
-#
-# u1db is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License version 3
-# as published by the Free Software Foundation.
-#
-# u1db is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with u1db. If not, see <http://www.gnu.org/licenses/>.
-
-"""Test the WSGI app."""
-
-import paste.fixture
-import sys
-try:
- import simplejson as json
-except ImportError:
- import json # noqa
-import StringIO
-
-from u1db import (
- __version__ as _u1db_version,
- errors,
- sync,
- tests,
- )
-
-from u1db.remote import (
- http_app,
- http_errors,
- )
-
-
-class TestFencedReader(tests.TestCase):
-
- def test_init(self):
- reader = http_app._FencedReader(StringIO.StringIO(""), 25, 100)
- self.assertEqual(25, reader.remaining)
-
- def test_read_chunk(self):
- inp = StringIO.StringIO("abcdef")
- reader = http_app._FencedReader(inp, 5, 10)
- data = reader.read_chunk(2)
- self.assertEqual("ab", data)
- self.assertEqual(2, inp.tell())
- self.assertEqual(3, reader.remaining)
-
- def test_read_chunk_remaining(self):
- inp = StringIO.StringIO("abcdef")
- reader = http_app._FencedReader(inp, 4, 10)
- data = reader.read_chunk(9999)
- self.assertEqual("abcd", data)
- self.assertEqual(4, inp.tell())
- self.assertEqual(0, reader.remaining)
-
- def test_read_chunk_nothing_left(self):
- inp = StringIO.StringIO("abc")
- reader = http_app._FencedReader(inp, 2, 10)
- reader.read_chunk(2)
- self.assertEqual(2, inp.tell())
- self.assertEqual(0, reader.remaining)
- data = reader.read_chunk(2)
- self.assertEqual("", data)
- self.assertEqual(2, inp.tell())
- self.assertEqual(0, reader.remaining)
-
- def test_read_chunk_kept(self):
- inp = StringIO.StringIO("abcde")
- reader = http_app._FencedReader(inp, 4, 10)
- reader._kept = "xyz"
- data = reader.read_chunk(2) # atmost ignored
- self.assertEqual("xyz", data)
- self.assertEqual(0, inp.tell())
- self.assertEqual(4, reader.remaining)
- self.assertIsNone(reader._kept)
-
- def test_getline(self):
- inp = StringIO.StringIO("abc\r\nde")
- reader = http_app._FencedReader(inp, 6, 10)
- reader.MAXCHUNK = 6
- line = reader.getline()
- self.assertEqual("abc\r\n", line)
- self.assertEqual("d", reader._kept)
-
- def test_getline_exact(self):
- inp = StringIO.StringIO("abcd\r\nef")
- reader = http_app._FencedReader(inp, 6, 10)
- reader.MAXCHUNK = 6
- line = reader.getline()
- self.assertEqual("abcd\r\n", line)
- self.assertIs(None, reader._kept)
-
- def test_getline_no_newline(self):
- inp = StringIO.StringIO("abcd")
- reader = http_app._FencedReader(inp, 4, 10)
- reader.MAXCHUNK = 6
- line = reader.getline()
- self.assertEqual("abcd", line)
-
- def test_getline_many_chunks(self):
- inp = StringIO.StringIO("abcde\r\nf")
- reader = http_app._FencedReader(inp, 8, 10)
- reader.MAXCHUNK = 4
- line = reader.getline()
- self.assertEqual("abcde\r\n", line)
- self.assertEqual("f", reader._kept)
- line = reader.getline()
- self.assertEqual("f", line)
-
- def test_getline_empty(self):
- inp = StringIO.StringIO("")
- reader = http_app._FencedReader(inp, 0, 10)
- reader.MAXCHUNK = 4
- line = reader.getline()
- self.assertEqual("", line)
- line = reader.getline()
- self.assertEqual("", line)
-
- def test_getline_just_newline(self):
- inp = StringIO.StringIO("\r\n")
- reader = http_app._FencedReader(inp, 2, 10)
- reader.MAXCHUNK = 4
- line = reader.getline()
- self.assertEqual("\r\n", line)
- line = reader.getline()
- self.assertEqual("", line)
-
- def test_getline_too_large(self):
- inp = StringIO.StringIO("x" * 50)
- reader = http_app._FencedReader(inp, 50, 25)
- reader.MAXCHUNK = 4
- self.assertRaises(http_app.BadRequest, reader.getline)
-
- def test_getline_too_large_complete(self):
- inp = StringIO.StringIO("x" * 25 + "\r\n")
- reader = http_app._FencedReader(inp, 50, 25)
- reader.MAXCHUNK = 4
- self.assertRaises(http_app.BadRequest, reader.getline)
-
-
-class TestHTTPMethodDecorator(tests.TestCase):
-
- def test_args(self):
- @http_app.http_method()
- def f(self, a, b):
- return self, a, b
- res = f("self", {"a": "x", "b": "y"}, None)
- self.assertEqual(("self", "x", "y"), res)
-
- def test_args_missing(self):
- @http_app.http_method()
- def f(self, a, b):
- return a, b
- self.assertRaises(http_app.BadRequest, f, "self", {"a": "x"}, None)
-
- def test_args_unexpected(self):
- @http_app.http_method()
- def f(self, a):
- return a
- self.assertRaises(http_app.BadRequest, f, "self",
- {"a": "x", "c": "z"}, None)
-
- def test_args_default(self):
- @http_app.http_method()
- def f(self, a, b="z"):
- return a, b
- res = f("self", {"a": "x"}, None)
- self.assertEqual(("x", "z"), res)
-
- def test_args_conversion(self):
- @http_app.http_method(b=int)
- def f(self, a, b):
- return self, a, b
- res = f("self", {"a": "x", "b": "2"}, None)
- self.assertEqual(("self", "x", 2), res)
-
- self.assertRaises(http_app.BadRequest, f, "self",
- {"a": "x", "b": "foo"}, None)
-
- def test_args_conversion_with_default(self):
- @http_app.http_method(b=str)
- def f(self, a, b=None):
- return self, a, b
- res = f("self", {"a": "x"}, None)
- self.assertEqual(("self", "x", None), res)
-
- def test_args_content(self):
- @http_app.http_method()
- def f(self, a, content):
- return a, content
- res = f(self, {"a": "x"}, "CONTENT")
- self.assertEqual(("x", "CONTENT"), res)
-
- def test_args_content_as_args(self):
- @http_app.http_method(b=int, content_as_args=True)
- def f(self, a, b):
- return self, a, b
- res = f("self", {"a": "x"}, '{"b": "2"}')
- self.assertEqual(("self", "x", 2), res)
-
- self.assertRaises(http_app.BadRequest, f, "self", {}, 'not-json')
-
- def test_args_content_no_query(self):
- @http_app.http_method(no_query=True,
- content_as_args=True)
- def f(self, a='a', b='b'):
- return a, b
- res = f("self", {}, '{"b": "y"}')
- self.assertEqual(('a', 'y'), res)
-
- self.assertRaises(http_app.BadRequest, f, "self", {'a': 'x'},
- '{"b": "y"}')
-
-
-class TestResource(object):
-
- @http_app.http_method()
- def get(self, a, b):
- self.args = dict(a=a, b=b)
- return 'Get'
-
- @http_app.http_method()
- def put(self, a, content):
- self.args = dict(a=a)
- self.content = content
- return 'Put'
-
- @http_app.http_method(content_as_args=True)
- def put_args(self, a, b):
- self.args = dict(a=a, b=b)
- self.order = ['a']
- self.entries = []
-
- @http_app.http_method()
- def put_stream_entry(self, content):
- self.entries.append(content)
- self.order.append('s')
-
- def put_end(self):
- self.order.append('e')
- return "Put/end"
-
-
-class parameters:
- max_request_size = 200000
- max_entry_size = 100000
-
-
-class TestHTTPInvocationByMethodWithBody(tests.TestCase):
-
- def test_get(self):
- resource = TestResource()
- environ = {'QUERY_STRING': 'a=1&b=2', 'REQUEST_METHOD': 'GET'}
- invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ,
- parameters)
- res = invoke()
- self.assertEqual('Get', res)
- self.assertEqual({'a': '1', 'b': '2'}, resource.args)
-
- def test_put_json(self):
- resource = TestResource()
- body = '{"body": true}'
- environ = {'QUERY_STRING': 'a=1', 'REQUEST_METHOD': 'PUT',
- 'wsgi.input': StringIO.StringIO(body),
- 'CONTENT_LENGTH': str(len(body)),
- 'CONTENT_TYPE': 'application/json'}
- invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ,
- parameters)
- res = invoke()
- self.assertEqual('Put', res)
- self.assertEqual({'a': '1'}, resource.args)
- self.assertEqual('{"body": true}', resource.content)
-
- def test_put_sync_stream(self):
- resource = TestResource()
- body = (
- '[\r\n'
- '{"b": 2},\r\n' # args
- '{"entry": "x"},\r\n' # stream entry
- '{"entry": "y"}\r\n' # stream entry
- ']'
- )
- environ = {'QUERY_STRING': 'a=1', 'REQUEST_METHOD': 'PUT',
- 'wsgi.input': StringIO.StringIO(body),
- 'CONTENT_LENGTH': str(len(body)),
- 'CONTENT_TYPE': 'application/x-u1db-sync-stream'}
- invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ,
- parameters)
- res = invoke()
- self.assertEqual('Put/end', res)
- self.assertEqual({'a': '1', 'b': 2}, resource.args)
- self.assertEqual(
- ['{"entry": "x"}', '{"entry": "y"}'], resource.entries)
- self.assertEqual(['a', 's', 's', 'e'], resource.order)
-
- def _put_sync_stream(self, body):
- resource = TestResource()
- environ = {'QUERY_STRING': 'a=1&b=2', 'REQUEST_METHOD': 'PUT',
- 'wsgi.input': StringIO.StringIO(body),
- 'CONTENT_LENGTH': str(len(body)),
- 'CONTENT_TYPE': 'application/x-u1db-sync-stream'}
- invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ,
- parameters)
- invoke()
-
- def test_put_sync_stream_wrong_start(self):
- self.assertRaises(http_app.BadRequest,
- self._put_sync_stream, "{}\r\n]")
-
- self.assertRaises(http_app.BadRequest,
- self._put_sync_stream, "\r\n{}\r\n]")
-
- self.assertRaises(http_app.BadRequest,
- self._put_sync_stream, "")
-
- def test_put_sync_stream_wrong_end(self):
- self.assertRaises(http_app.BadRequest,
- self._put_sync_stream, "[\r\n{}")
-
- self.assertRaises(http_app.BadRequest,
- self._put_sync_stream, "[\r\n")
-
- self.assertRaises(http_app.BadRequest,
- self._put_sync_stream, "[\r\n{}\r\n]\r\n...")
-
- def test_put_sync_stream_missing_comma(self):
- self.assertRaises(http_app.BadRequest,
- self._put_sync_stream, "[\r\n{}\r\n{}\r\n]")
-
- def test_put_sync_stream_extra_comma(self):
- self.assertRaises(http_app.BadRequest,
- self._put_sync_stream, "[\r\n{},\r\n]")
-
- self.assertRaises(http_app.BadRequest,
- self._put_sync_stream, "[\r\n{},\r\n{},\r\n]")
-
- def test_bad_request_decode_failure(self):
- resource = TestResource()
- environ = {'QUERY_STRING': 'a=\xff', 'REQUEST_METHOD': 'PUT',
- 'wsgi.input': StringIO.StringIO('{}'),
- 'CONTENT_LENGTH': '2',
- 'CONTENT_TYPE': 'application/json'}
- invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ,
- parameters)
- self.assertRaises(http_app.BadRequest, invoke)
-
- def test_bad_request_unsupported_content_type(self):
- resource = TestResource()
- environ = {'QUERY_STRING': '', 'REQUEST_METHOD': 'PUT',
- 'wsgi.input': StringIO.StringIO('{}'),
- 'CONTENT_LENGTH': '2',
- 'CONTENT_TYPE': 'text/plain'}
- invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ,
- parameters)
- self.assertRaises(http_app.BadRequest, invoke)
-
- def test_bad_request_content_length_too_large(self):
- resource = TestResource()
- environ = {'QUERY_STRING': '', 'REQUEST_METHOD': 'PUT',
- 'wsgi.input': StringIO.StringIO('{}'),
- 'CONTENT_LENGTH': '10000',
- 'CONTENT_TYPE': 'text/plain'}
-
- resource.max_request_size = 5000
- resource.max_entry_size = sys.maxint # we don't get to use this
-
- invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ,
- parameters)
- self.assertRaises(http_app.BadRequest, invoke)
-
- def test_bad_request_no_content_length(self):
- resource = TestResource()
- environ = {'QUERY_STRING': '', 'REQUEST_METHOD': 'PUT',
- 'wsgi.input': StringIO.StringIO('a'),
- 'CONTENT_TYPE': 'application/json'}
- invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ,
- parameters)
- self.assertRaises(http_app.BadRequest, invoke)
-
- def test_bad_request_invalid_content_length(self):
- resource = TestResource()
- environ = {'QUERY_STRING': '', 'REQUEST_METHOD': 'PUT',
- 'wsgi.input': StringIO.StringIO('abc'),
- 'CONTENT_LENGTH': '1unk',
- 'CONTENT_TYPE': 'application/json'}
- invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ,
- parameters)
- self.assertRaises(http_app.BadRequest, invoke)
-
- def test_bad_request_empty_body(self):
- resource = TestResource()
- environ = {'QUERY_STRING': '', 'REQUEST_METHOD': 'PUT',
- 'wsgi.input': StringIO.StringIO(''),
- 'CONTENT_LENGTH': '0',
- 'CONTENT_TYPE': 'application/json'}
- invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ,
- parameters)
- self.assertRaises(http_app.BadRequest, invoke)
-
- def test_bad_request_unsupported_method_get_like(self):
- resource = TestResource()
- environ = {'QUERY_STRING': '', 'REQUEST_METHOD': 'DELETE'}
- invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ,
- parameters)
- self.assertRaises(http_app.BadRequest, invoke)
-
- def test_bad_request_unsupported_method_put_like(self):
- resource = TestResource()
- environ = {'QUERY_STRING': '', 'REQUEST_METHOD': 'PUT',
- 'wsgi.input': StringIO.StringIO('{}'),
- 'CONTENT_LENGTH': '2',
- 'CONTENT_TYPE': 'application/json'}
- invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ,
- parameters)
- self.assertRaises(http_app.BadRequest, invoke)
-
- def test_bad_request_unsupported_method_put_like_multi_json(self):
- resource = TestResource()
- body = '{}\r\n{}\r\n'
- environ = {'QUERY_STRING': '', 'REQUEST_METHOD': 'POST',
- 'wsgi.input': StringIO.StringIO(body),
- 'CONTENT_LENGTH': str(len(body)),
- 'CONTENT_TYPE': 'application/x-u1db-multi-json'}
- invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ,
- parameters)
- self.assertRaises(http_app.BadRequest, invoke)
-
-
-class TestHTTPResponder(tests.TestCase):
-
- def start_response(self, status, headers):
- self.status = status
- self.headers = dict(headers)
- self.response_body = []
-
- def write(data):
- self.response_body.append(data)
-
- return write
-
- def test_send_response_content_w_headers(self):
- responder = http_app.HTTPResponder(self.start_response)
- responder.send_response_content('foo', headers={'x-a': '1'})
- self.assertEqual('200 OK', self.status)
- self.assertEqual({'content-type': 'application/json',
- 'cache-control': 'no-cache',
- 'x-a': '1', 'content-length': '3'}, self.headers)
- self.assertEqual([], self.response_body)
- self.assertEqual(['foo'], responder.content)
-
- def test_send_response_json(self):
- responder = http_app.HTTPResponder(self.start_response)
- responder.send_response_json(value='success')
- self.assertEqual('200 OK', self.status)
- expected_body = '{"value": "success"}\r\n'
- self.assertEqual({'content-type': 'application/json',
- 'content-length': str(len(expected_body)),
- 'cache-control': 'no-cache'}, self.headers)
- self.assertEqual([], self.response_body)
- self.assertEqual([expected_body], responder.content)
-
- def test_send_response_json_status_fail(self):
- responder = http_app.HTTPResponder(self.start_response)
- responder.send_response_json(400)
- self.assertEqual('400 Bad Request', self.status)
- expected_body = '{}\r\n'
- self.assertEqual({'content-type': 'application/json',
- 'content-length': str(len(expected_body)),
- 'cache-control': 'no-cache'}, self.headers)
- self.assertEqual([], self.response_body)
- self.assertEqual([expected_body], responder.content)
-
- def test_start_finish_response_status_fail(self):
- responder = http_app.HTTPResponder(self.start_response)
- responder.start_response(404, {'error': 'not found'})
- responder.finish_response()
- self.assertEqual('404 Not Found', self.status)
- self.assertEqual({'content-type': 'application/json',
- 'cache-control': 'no-cache'}, self.headers)
- self.assertEqual(['{"error": "not found"}\r\n'], self.response_body)
- self.assertEqual([], responder.content)
-
- def test_send_stream_entry(self):
- responder = http_app.HTTPResponder(self.start_response)
- responder.content_type = "application/x-u1db-multi-json"
- responder.start_response(200)
- responder.start_stream()
- responder.stream_entry({'entry': 1})
- responder.stream_entry({'entry': 2})
- responder.end_stream()
- responder.finish_response()
- self.assertEqual('200 OK', self.status)
- self.assertEqual({'content-type': 'application/x-u1db-multi-json',
- 'cache-control': 'no-cache'}, self.headers)
- self.assertEqual(['[',
- '\r\n', '{"entry": 1}',
- ',\r\n', '{"entry": 2}',
- '\r\n]\r\n'], self.response_body)
- self.assertEqual([], responder.content)
-
- def test_send_stream_w_error(self):
- responder = http_app.HTTPResponder(self.start_response)
- responder.content_type = "application/x-u1db-multi-json"
- responder.start_response(200)
- responder.start_stream()
- responder.stream_entry({'entry': 1})
- responder.send_response_json(503, error="unavailable")
- self.assertEqual('200 OK', self.status)
- self.assertEqual({'content-type': 'application/x-u1db-multi-json',
- 'cache-control': 'no-cache'}, self.headers)
- self.assertEqual(['[',
- '\r\n', '{"entry": 1}'], self.response_body)
- self.assertEqual([',\r\n', '{"error": "unavailable"}\r\n'],
- responder.content)
-
-
-class TestHTTPApp(tests.TestCase):
-
- def setUp(self):
- super(TestHTTPApp, self).setUp()
- self.state = tests.ServerStateForTests()
- self.http_app = http_app.HTTPApp(self.state)
- self.app = paste.fixture.TestApp(self.http_app)
- self.db0 = self.state._create_database('db0')
-
- def test_bad_request_broken(self):
- resp = self.app.put('/db0/doc/doc1', params='{"x": 1}',
- headers={'content-type': 'application/foo'},
- expect_errors=True)
- self.assertEqual(400, resp.status)
-
- def test_bad_request_dispatch(self):
- resp = self.app.put('/db0/foo/doc1', params='{"x": 1}',
- headers={'content-type': 'application/json'},
- expect_errors=True)
- self.assertEqual(400, resp.status)
-
- def test_version(self):
- resp = self.app.get('/')
- self.assertEqual(200, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual({"version": _u1db_version}, json.loads(resp.body))
-
- def test_create_database(self):
- resp = self.app.put('/db1', params='{}',
- headers={'content-type': 'application/json'})
- self.assertEqual(200, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual({'ok': True}, json.loads(resp.body))
-
- resp = self.app.put('/db1', params='{}',
- headers={'content-type': 'application/json'})
- self.assertEqual(200, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual({'ok': True}, json.loads(resp.body))
-
- def test_delete_database(self):
- resp = self.app.delete('/db0')
- self.assertEqual(200, resp.status)
- self.assertRaises(errors.DatabaseDoesNotExist,
- self.state.check_database, 'db0')
-
- def test_get_database(self):
- resp = self.app.get('/db0')
- self.assertEqual(200, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual({}, json.loads(resp.body))
-
- def test_valid_database_names(self):
- resp = self.app.get('/a-database', expect_errors=True)
- self.assertEqual(404, resp.status)
-
- resp = self.app.get('/db1', expect_errors=True)
- self.assertEqual(404, resp.status)
-
- resp = self.app.get('/0', expect_errors=True)
- self.assertEqual(404, resp.status)
-
- resp = self.app.get('/0-0', expect_errors=True)
- self.assertEqual(404, resp.status)
-
- resp = self.app.get('/org.future', expect_errors=True)
- self.assertEqual(404, resp.status)
-
- def test_invalid_database_names(self):
- resp = self.app.get('/.a', expect_errors=True)
- self.assertEqual(400, resp.status)
-
- resp = self.app.get('/-a', expect_errors=True)
- self.assertEqual(400, resp.status)
-
- resp = self.app.get('/_a', expect_errors=True)
- self.assertEqual(400, resp.status)
-
- def test_put_doc_create(self):
- resp = self.app.put('/db0/doc/doc1', params='{"x": 1}',
- headers={'content-type': 'application/json'})
- doc = self.db0.get_doc('doc1')
- self.assertEqual(201, resp.status) # created
- self.assertEqual('{"x": 1}', doc.get_json())
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual({'rev': doc.rev}, json.loads(resp.body))
-
- def test_put_doc(self):
- doc = self.db0.create_doc_from_json('{"x": 1}', doc_id='doc1')
- resp = self.app.put('/db0/doc/doc1?old_rev=%s' % doc.rev,
- params='{"x": 2}',
- headers={'content-type': 'application/json'})
- doc = self.db0.get_doc('doc1')
- self.assertEqual(200, resp.status)
- self.assertEqual('{"x": 2}', doc.get_json())
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual({'rev': doc.rev}, json.loads(resp.body))
-
- def test_put_doc_too_large(self):
- self.http_app.max_request_size = 15000
- doc = self.db0.create_doc_from_json('{"x": 1}', doc_id='doc1')
- resp = self.app.put('/db0/doc/doc1?old_rev=%s' % doc.rev,
- params='{"%s": 2}' % ('z' * 16000),
- headers={'content-type': 'application/json'},
- expect_errors=True)
- self.assertEqual(400, resp.status)
-
- def test_delete_doc(self):
- doc = self.db0.create_doc_from_json('{"x": 1}', doc_id='doc1')
- resp = self.app.delete('/db0/doc/doc1?old_rev=%s' % doc.rev)
- doc = self.db0.get_doc('doc1', include_deleted=True)
- self.assertEqual(None, doc.content)
- self.assertEqual(200, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual({'rev': doc.rev}, json.loads(resp.body))
-
- def test_get_doc(self):
- doc = self.db0.create_doc_from_json('{"x": 1}', doc_id='doc1')
- resp = self.app.get('/db0/doc/%s' % doc.doc_id)
- self.assertEqual(200, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual('{"x": 1}', resp.body)
- self.assertEqual(doc.rev, resp.header('x-u1db-rev'))
- self.assertEqual('false', resp.header('x-u1db-has-conflicts'))
-
- def test_get_doc_non_existing(self):
- resp = self.app.get('/db0/doc/not-there', expect_errors=True)
- self.assertEqual(404, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual(
- {"error": "document does not exist"}, json.loads(resp.body))
- self.assertEqual('', resp.header('x-u1db-rev'))
- self.assertEqual('false', resp.header('x-u1db-has-conflicts'))
-
- def test_get_doc_deleted(self):
- doc = self.db0.create_doc_from_json('{"x": 1}', doc_id='doc1')
- self.db0.delete_doc(doc)
- resp = self.app.get('/db0/doc/doc1', expect_errors=True)
- self.assertEqual(404, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual(
- {"error": errors.DocumentDoesNotExist.wire_description},
- json.loads(resp.body))
-
- def test_get_doc_deleted_explicit_exclude(self):
- doc = self.db0.create_doc_from_json('{"x": 1}', doc_id='doc1')
- self.db0.delete_doc(doc)
- resp = self.app.get(
- '/db0/doc/doc1?include_deleted=false', expect_errors=True)
- self.assertEqual(404, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual(
- {"error": errors.DocumentDoesNotExist.wire_description},
- json.loads(resp.body))
-
- def test_get_deleted_doc(self):
- doc = self.db0.create_doc_from_json('{"x": 1}', doc_id='doc1')
- self.db0.delete_doc(doc)
- resp = self.app.get(
- '/db0/doc/doc1?include_deleted=true', expect_errors=True)
- self.assertEqual(404, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual(
- {"error": errors.DOCUMENT_DELETED}, json.loads(resp.body))
- self.assertEqual(doc.rev, resp.header('x-u1db-rev'))
- self.assertEqual('false', resp.header('x-u1db-has-conflicts'))
-
- def test_get_doc_non_existing_dabase(self):
- resp = self.app.get('/not-there/doc/doc1', expect_errors=True)
- self.assertEqual(404, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual(
- {"error": "database does not exist"}, json.loads(resp.body))
-
- def test_get_docs(self):
- doc1 = self.db0.create_doc_from_json('{"x": 1}', doc_id='doc1')
- doc2 = self.db0.create_doc_from_json('{"x": 1}', doc_id='doc2')
- ids = ','.join([doc1.doc_id, doc2.doc_id])
- resp = self.app.get('/db0/docs?doc_ids=%s' % ids)
- self.assertEqual(200, resp.status)
- self.assertEqual(
- 'application/json', resp.header('content-type'))
- expected = [
- {"content": '{"x": 1}', "doc_rev": "db0:1", "doc_id": "doc1",
- "has_conflicts": False},
- {"content": '{"x": 1}', "doc_rev": "db0:1", "doc_id": "doc2",
- "has_conflicts": False}]
- self.assertEqual(expected, json.loads(resp.body))
-
- def test_get_docs_missing_doc_ids(self):
- resp = self.app.get('/db0/docs', expect_errors=True)
- self.assertEqual(400, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual(
- {"error": "missing document ids"}, json.loads(resp.body))
-
- def test_get_docs_empty_doc_ids(self):
- resp = self.app.get('/db0/docs?doc_ids=', expect_errors=True)
- self.assertEqual(400, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual(
- {"error": "missing document ids"}, json.loads(resp.body))
-
- def test_get_docs_percent(self):
- doc1 = self.db0.create_doc_from_json('{"x": 1}', doc_id='doc%1')
- doc2 = self.db0.create_doc_from_json('{"x": 1}', doc_id='doc2')
- ids = ','.join([doc1.doc_id, doc2.doc_id])
- resp = self.app.get('/db0/docs?doc_ids=%s' % ids)
- self.assertEqual(200, resp.status)
- self.assertEqual(
- 'application/json', resp.header('content-type'))
- expected = [
- {"content": '{"x": 1}', "doc_rev": "db0:1", "doc_id": "doc%1",
- "has_conflicts": False},
- {"content": '{"x": 1}', "doc_rev": "db0:1", "doc_id": "doc2",
- "has_conflicts": False}]
- self.assertEqual(expected, json.loads(resp.body))
-
- def test_get_docs_deleted(self):
- doc1 = self.db0.create_doc_from_json('{"x": 1}', doc_id='doc1')
- doc2 = self.db0.create_doc_from_json('{"x": 1}', doc_id='doc2')
- self.db0.delete_doc(doc2)
- ids = ','.join([doc1.doc_id, doc2.doc_id])
- resp = self.app.get('/db0/docs?doc_ids=%s' % ids)
- self.assertEqual(200, resp.status)
- self.assertEqual(
- 'application/json', resp.header('content-type'))
- expected = [
- {"content": '{"x": 1}', "doc_rev": "db0:1", "doc_id": "doc1",
- "has_conflicts": False}]
- self.assertEqual(expected, json.loads(resp.body))
-
- def test_get_docs_include_deleted(self):
- doc1 = self.db0.create_doc_from_json('{"x": 1}', doc_id='doc1')
- doc2 = self.db0.create_doc_from_json('{"x": 1}', doc_id='doc2')
- self.db0.delete_doc(doc2)
- ids = ','.join([doc1.doc_id, doc2.doc_id])
- resp = self.app.get('/db0/docs?doc_ids=%s&include_deleted=true' % ids)
- self.assertEqual(200, resp.status)
- self.assertEqual(
- 'application/json', resp.header('content-type'))
- expected = [
- {"content": '{"x": 1}', "doc_rev": "db0:1", "doc_id": "doc1",
- "has_conflicts": False},
- {"content": None, "doc_rev": "db0:2", "doc_id": "doc2",
- "has_conflicts": False}]
- self.assertEqual(expected, json.loads(resp.body))
-
- def test_get_sync_info(self):
- self.db0._set_replica_gen_and_trans_id('other-id', 1, 'T-transid')
- resp = self.app.get('/db0/sync-from/other-id')
- self.assertEqual(200, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual(dict(target_replica_uid='db0',
- target_replica_generation=0,
- target_replica_transaction_id='',
- source_replica_uid='other-id',
- source_replica_generation=1,
- source_transaction_id='T-transid'),
- json.loads(resp.body))
-
- def test_record_sync_info(self):
- resp = self.app.put('/db0/sync-from/other-id',
- params='{"generation": 2, "transaction_id": "T-transid"}',
- headers={'content-type': 'application/json'})
- self.assertEqual(200, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual({'ok': True}, json.loads(resp.body))
- self.assertEqual(
- (2, 'T-transid'),
- self.db0._get_replica_gen_and_trans_id('other-id'))
-
- def test_sync_exchange_send(self):
- entries = {
- 10: {'id': 'doc-here', 'rev': 'replica:1', 'content':
- '{"value": "here"}', 'gen': 10, 'trans_id': 'T-sid'},
- 11: {'id': 'doc-here2', 'rev': 'replica:1', 'content':
- '{"value": "here2"}', 'gen': 11, 'trans_id': 'T-sed'}
- }
-
- gens = []
- _do_set_replica_gen_and_trans_id = \
- self.db0._do_set_replica_gen_and_trans_id
-
- def set_sync_generation_witness(other_uid, other_gen, other_trans_id):
- gens.append((other_uid, other_gen))
- _do_set_replica_gen_and_trans_id(
- other_uid, other_gen, other_trans_id)
- self.assertGetDoc(self.db0, entries[other_gen]['id'],
- entries[other_gen]['rev'],
- entries[other_gen]['content'], False)
-
- self.patch(
- self.db0, '_do_set_replica_gen_and_trans_id',
- set_sync_generation_witness)
-
- args = dict(last_known_generation=0)
- body = ("[\r\n" +
- "%s,\r\n" % json.dumps(args) +
- "%s,\r\n" % json.dumps(entries[10]) +
- "%s\r\n" % json.dumps(entries[11]) +
- "]\r\n")
- resp = self.app.post('/db0/sync-from/replica',
- params=body,
- headers={'content-type':
- 'application/x-u1db-sync-stream'})
- self.assertEqual(200, resp.status)
- self.assertEqual('application/x-u1db-sync-stream',
- resp.header('content-type'))
- bits = resp.body.split('\r\n')
- self.assertEqual('[', bits[0])
- last_trans_id = self.db0._get_transaction_log()[-1][1]
- self.assertEqual({'new_generation': 2,
- 'new_transaction_id': last_trans_id},
- json.loads(bits[1]))
- self.assertEqual(']', bits[2])
- self.assertEqual('', bits[3])
- self.assertEqual([('replica', 10), ('replica', 11)], gens)
-
- def test_sync_exchange_send_ensure(self):
- entries = {
- 10: {'id': 'doc-here', 'rev': 'replica:1', 'content':
- '{"value": "here"}', 'gen': 10, 'trans_id': 'T-sid'},
- 11: {'id': 'doc-here2', 'rev': 'replica:1', 'content':
- '{"value": "here2"}', 'gen': 11, 'trans_id': 'T-sed'}
- }
-
- args = dict(last_known_generation=0, ensure=True)
- body = ("[\r\n" +
- "%s,\r\n" % json.dumps(args) +
- "%s,\r\n" % json.dumps(entries[10]) +
- "%s\r\n" % json.dumps(entries[11]) +
- "]\r\n")
- resp = self.app.post('/dbnew/sync-from/replica',
- params=body,
- headers={'content-type':
- 'application/x-u1db-sync-stream'})
- self.assertEqual(200, resp.status)
- self.assertEqual('application/x-u1db-sync-stream',
- resp.header('content-type'))
- bits = resp.body.split('\r\n')
- self.assertEqual('[', bits[0])
- dbnew = self.state.open_database("dbnew")
- last_trans_id = dbnew._get_transaction_log()[-1][1]
- self.assertEqual({'new_generation': 2,
- 'new_transaction_id': last_trans_id,
- 'replica_uid': dbnew._replica_uid},
- json.loads(bits[1]))
- self.assertEqual(']', bits[2])
- self.assertEqual('', bits[3])
-
- def test_sync_exchange_send_entry_too_large(self):
- self.patch(http_app.SyncResource, 'max_request_size', 20000)
- self.patch(http_app.SyncResource, 'max_entry_size', 10000)
- entries = {
- 10: {'id': 'doc-here', 'rev': 'replica:1', 'content':
- '{"value": "%s"}' % ('H' * 11000), 'gen': 10},
- }
- args = dict(last_known_generation=0)
- body = ("[\r\n" +
- "%s,\r\n" % json.dumps(args) +
- "%s\r\n" % json.dumps(entries[10]) +
- "]\r\n")
- resp = self.app.post('/db0/sync-from/replica',
- params=body,
- headers={'content-type':
- 'application/x-u1db-sync-stream'},
- expect_errors=True)
- self.assertEqual(400, resp.status)
-
- def test_sync_exchange_receive(self):
- doc = self.db0.create_doc_from_json('{"value": "there"}')
- doc2 = self.db0.create_doc_from_json('{"value": "there2"}')
- args = dict(last_known_generation=0)
- body = "[\r\n%s\r\n]" % json.dumps(args)
- resp = self.app.post('/db0/sync-from/replica',
- params=body,
- headers={'content-type':
- 'application/x-u1db-sync-stream'})
- self.assertEqual(200, resp.status)
- self.assertEqual('application/x-u1db-sync-stream',
- resp.header('content-type'))
- parts = resp.body.splitlines()
- self.assertEqual(5, len(parts))
- self.assertEqual('[', parts[0])
- last_trans_id = self.db0._get_transaction_log()[-1][1]
- self.assertEqual({'new_generation': 2,
- 'new_transaction_id': last_trans_id},
- json.loads(parts[1].rstrip(",")))
- part2 = json.loads(parts[2].rstrip(","))
- self.assertTrue(part2['trans_id'].startswith('T-'))
- self.assertEqual('{"value": "there"}', part2['content'])
- self.assertEqual(doc.rev, part2['rev'])
- self.assertEqual(doc.doc_id, part2['id'])
- self.assertEqual(1, part2['gen'])
- part3 = json.loads(parts[3].rstrip(","))
- self.assertTrue(part3['trans_id'].startswith('T-'))
- self.assertEqual('{"value": "there2"}', part3['content'])
- self.assertEqual(doc2.rev, part3['rev'])
- self.assertEqual(doc2.doc_id, part3['id'])
- self.assertEqual(2, part3['gen'])
- self.assertEqual(']', parts[4])
-
- def test_sync_exchange_error_in_stream(self):
- args = dict(last_known_generation=0)
- body = "[\r\n%s\r\n]" % json.dumps(args)
-
- def boom(self, return_doc_cb):
- raise errors.Unavailable
-
- self.patch(sync.SyncExchange, 'return_docs',
- boom)
- resp = self.app.post('/db0/sync-from/replica',
- params=body,
- headers={'content-type':
- 'application/x-u1db-sync-stream'})
- self.assertEqual(200, resp.status)
- self.assertEqual('application/x-u1db-sync-stream',
- resp.header('content-type'))
- parts = resp.body.splitlines()
- self.assertEqual(3, len(parts))
- self.assertEqual('[', parts[0])
- self.assertEqual({'new_generation': 0, 'new_transaction_id': ''},
- json.loads(parts[1].rstrip(",")))
- self.assertEqual({'error': 'unavailable'}, json.loads(parts[2]))
-
-
-class TestRequestHooks(tests.TestCase):
-
- def setUp(self):
- super(TestRequestHooks, self).setUp()
- self.state = tests.ServerStateForTests()
- self.http_app = http_app.HTTPApp(self.state)
- self.app = paste.fixture.TestApp(self.http_app)
- self.db0 = self.state._create_database('db0')
-
- def test_begin_and_done(self):
- calls = []
-
- def begin(environ):
- self.assertTrue('PATH_INFO' in environ)
- calls.append('begin')
-
- def done(environ):
- self.assertTrue('PATH_INFO' in environ)
- calls.append('done')
-
- self.http_app.request_begin = begin
- self.http_app.request_done = done
-
- doc = self.db0.create_doc_from_json('{"x": 1}', doc_id='doc1')
- self.app.get('/db0/doc/%s' % doc.doc_id)
-
- self.assertEqual(['begin', 'done'], calls)
-
- def test_bad_request(self):
- calls = []
-
- def begin(environ):
- self.assertTrue('PATH_INFO' in environ)
- calls.append('begin')
-
- def bad_request(environ):
- self.assertTrue('PATH_INFO' in environ)
- calls.append('bad-request')
-
- self.http_app.request_begin = begin
- self.http_app.request_bad_request = bad_request
- # shouldn't be called
- self.http_app.request_done = lambda env: 1 / 0
-
- resp = self.app.put('/db0/foo/doc1', params='{"x": 1}',
- headers={'content-type': 'application/json'},
- expect_errors=True)
- self.assertEqual(400, resp.status)
- self.assertEqual(['begin', 'bad-request'], calls)
-
-
-class TestHTTPErrors(tests.TestCase):
-
- def test_wire_description_to_status(self):
- self.assertNotIn("error", http_errors.wire_description_to_status)
-
-
-class TestHTTPAppErrorHandling(tests.TestCase):
-
- def setUp(self):
- super(TestHTTPAppErrorHandling, self).setUp()
- self.exc = None
- self.state = tests.ServerStateForTests()
-
- class ErroringResource(object):
-
- def post(_, args, content):
- raise self.exc
-
- def lookup_resource(environ, responder):
- return ErroringResource()
-
- self.http_app = http_app.HTTPApp(self.state)
- self.http_app._lookup_resource = lookup_resource
- self.app = paste.fixture.TestApp(self.http_app)
-
- def test_RevisionConflict_etc(self):
- self.exc = errors.RevisionConflict()
- resp = self.app.post('/req', params='{}',
- headers={'content-type': 'application/json'},
- expect_errors=True)
- self.assertEqual(409, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual({"error": "revision conflict"},
- json.loads(resp.body))
-
- def test_Unavailable(self):
- self.exc = errors.Unavailable
- resp = self.app.post('/req', params='{}',
- headers={'content-type': 'application/json'},
- expect_errors=True)
- self.assertEqual(503, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual({"error": "unavailable"},
- json.loads(resp.body))
-
- def test_generic_u1db_errors(self):
- self.exc = errors.U1DBError()
- resp = self.app.post('/req', params='{}',
- headers={'content-type': 'application/json'},
- expect_errors=True)
- self.assertEqual(500, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual({"error": "error"},
- json.loads(resp.body))
-
- def test_generic_u1db_errors_hooks(self):
- calls = []
-
- def begin(environ):
- self.assertTrue('PATH_INFO' in environ)
- calls.append('begin')
-
- def u1db_error(environ, exc):
- self.assertTrue('PATH_INFO' in environ)
- calls.append(('error', exc))
-
- self.http_app.request_begin = begin
- self.http_app.request_u1db_error = u1db_error
- # shouldn't be called
- self.http_app.request_done = lambda env: 1 / 0
-
- self.exc = errors.U1DBError()
- resp = self.app.post('/req', params='{}',
- headers={'content-type': 'application/json'},
- expect_errors=True)
- self.assertEqual(500, resp.status)
- self.assertEqual(['begin', ('error', self.exc)], calls)
-
- def test_failure(self):
- class Failure(Exception):
- pass
- self.exc = Failure()
- self.assertRaises(Failure, self.app.post, '/req', params='{}',
- headers={'content-type': 'application/json'})
-
- def test_failure_hooks(self):
- class Failure(Exception):
- pass
- calls = []
-
- def begin(environ):
- calls.append('begin')
-
- def failed(environ):
- self.assertTrue('PATH_INFO' in environ)
- calls.append(('failed', sys.exc_info()))
-
- self.http_app.request_begin = begin
- self.http_app.request_failed = failed
- # shouldn't be called
- self.http_app.request_done = lambda env: 1 / 0
-
- self.exc = Failure()
- self.assertRaises(Failure, self.app.post, '/req', params='{}',
- headers={'content-type': 'application/json'})
-
- self.assertEqual(2, len(calls))
- self.assertEqual('begin', calls[0])
- marker, (exc_type, exc, tb) = calls[1]
- self.assertEqual('failed', marker)
- self.assertEqual(self.exc, exc)
-
-
-class TestPluggableSyncExchange(tests.TestCase):
-
- def setUp(self):
- super(TestPluggableSyncExchange, self).setUp()
- self.state = tests.ServerStateForTests()
- self.state.ensure_database('foo')
-
- def test_plugging(self):
-
- class MySyncExchange(object):
- def __init__(self, db, source_replica_uid, last_known_generation):
- pass
-
- class MySyncResource(http_app.SyncResource):
- sync_exchange_class = MySyncExchange
-
- sync_res = MySyncResource('foo', 'src', self.state, None)
- sync_res.post_args(
- {'last_known_generation': 0, 'last_known_trans_id': None}, '{}')
- self.assertIsInstance(sync_res.sync_exch, MySyncExchange)
diff --git a/src/leap/soledad/u1db/tests/test_http_client.py b/src/leap/soledad/u1db/tests/test_http_client.py
deleted file mode 100644
index 115c8aaa..00000000
--- a/src/leap/soledad/u1db/tests/test_http_client.py
+++ /dev/null
@@ -1,361 +0,0 @@
-# Copyright 2011-2012 Canonical Ltd.
-#
-# This file is part of u1db.
-#
-# u1db is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License version 3
-# as published by the Free Software Foundation.
-#
-# u1db is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with u1db. If not, see <http://www.gnu.org/licenses/>.
-
-"""Tests for HTTPDatabase"""
-
-from oauth import oauth
-try:
- import simplejson as json
-except ImportError:
- import json # noqa
-
-from u1db import (
- errors,
- tests,
- )
-from u1db.remote import (
- http_client,
- )
-
-
-class TestEncoder(tests.TestCase):
-
- def test_encode_string(self):
- self.assertEqual("foo", http_client._encode_query_parameter("foo"))
-
- def test_encode_true(self):
- self.assertEqual("true", http_client._encode_query_parameter(True))
-
- def test_encode_false(self):
- self.assertEqual("false", http_client._encode_query_parameter(False))
-
-
-class TestHTTPClientBase(tests.TestCaseWithServer):
-
- def setUp(self):
- super(TestHTTPClientBase, self).setUp()
- self.errors = 0
-
- def app(self, environ, start_response):
- if environ['PATH_INFO'].endswith('echo'):
- start_response("200 OK", [('Content-Type', 'application/json')])
- ret = {}
- for name in ('REQUEST_METHOD', 'PATH_INFO', 'QUERY_STRING'):
- ret[name] = environ[name]
- if environ['REQUEST_METHOD'] in ('PUT', 'POST'):
- ret['CONTENT_TYPE'] = environ['CONTENT_TYPE']
- content_length = int(environ['CONTENT_LENGTH'])
- ret['body'] = environ['wsgi.input'].read(content_length)
- return [json.dumps(ret)]
- elif environ['PATH_INFO'].endswith('error_then_accept'):
- if self.errors >= 3:
- start_response(
- "200 OK", [('Content-Type', 'application/json')])
- ret = {}
- for name in ('REQUEST_METHOD', 'PATH_INFO', 'QUERY_STRING'):
- ret[name] = environ[name]
- if environ['REQUEST_METHOD'] in ('PUT', 'POST'):
- ret['CONTENT_TYPE'] = environ['CONTENT_TYPE']
- content_length = int(environ['CONTENT_LENGTH'])
- ret['body'] = '{"oki": "doki"}'
- return [json.dumps(ret)]
- self.errors += 1
- content_length = int(environ['CONTENT_LENGTH'])
- error = json.loads(
- environ['wsgi.input'].read(content_length))
- response = error['response']
- # In debug mode, wsgiref has an assertion that the status parameter
- # is a 'str' object. However error['status'] returns a unicode
- # object.
- status = str(error['status'])
- if isinstance(response, unicode):
- response = str(response)
- if isinstance(response, str):
- start_response(status, [('Content-Type', 'text/plain')])
- return [str(response)]
- else:
- start_response(status, [('Content-Type', 'application/json')])
- return [json.dumps(response)]
- elif environ['PATH_INFO'].endswith('error'):
- self.errors += 1
- content_length = int(environ['CONTENT_LENGTH'])
- error = json.loads(
- environ['wsgi.input'].read(content_length))
- response = error['response']
- # In debug mode, wsgiref has an assertion that the status parameter
- # is a 'str' object. However error['status'] returns a unicode
- # object.
- status = str(error['status'])
- if isinstance(response, unicode):
- response = str(response)
- if isinstance(response, str):
- start_response(status, [('Content-Type', 'text/plain')])
- return [str(response)]
- else:
- start_response(status, [('Content-Type', 'application/json')])
- return [json.dumps(response)]
- elif '/oauth' in environ['PATH_INFO']:
- base_url = self.getURL('').rstrip('/')
- oauth_req = oauth.OAuthRequest.from_request(
- http_method=environ['REQUEST_METHOD'],
- http_url=base_url + environ['PATH_INFO'],
- headers={'Authorization': environ['HTTP_AUTHORIZATION']},
- query_string=environ['QUERY_STRING']
- )
- oauth_server = oauth.OAuthServer(tests.testingOAuthStore)
- oauth_server.add_signature_method(tests.sign_meth_HMAC_SHA1)
- try:
- consumer, token, params = oauth_server.verify_request(
- oauth_req)
- except oauth.OAuthError, e:
- start_response("401 Unauthorized",
- [('Content-Type', 'application/json')])
- return [json.dumps({"error": "unauthorized",
- "message": e.message})]
- start_response("200 OK", [('Content-Type', 'application/json')])
- return [json.dumps([environ['PATH_INFO'], token.key, params])]
-
- def make_app(self):
- return self.app
-
- def getClient(self, **kwds):
- self.startServer()
- return http_client.HTTPClientBase(self.getURL('dbase'), **kwds)
-
- def test_construct(self):
- self.startServer()
- url = self.getURL()
- cli = http_client.HTTPClientBase(url)
- self.assertEqual(url, cli._url.geturl())
- self.assertIs(None, cli._conn)
-
- def test_parse_url(self):
- cli = http_client.HTTPClientBase(
- '%s://127.0.0.1:12345/' % self.url_scheme)
- self.assertEqual(self.url_scheme, cli._url.scheme)
- self.assertEqual('127.0.0.1', cli._url.hostname)
- self.assertEqual(12345, cli._url.port)
- self.assertEqual('/', cli._url.path)
-
- def test__ensure_connection(self):
- cli = self.getClient()
- self.assertIs(None, cli._conn)
- cli._ensure_connection()
- self.assertIsNot(None, cli._conn)
- conn = cli._conn
- cli._ensure_connection()
- self.assertIs(conn, cli._conn)
-
- def test_close(self):
- cli = self.getClient()
- cli._ensure_connection()
- cli.close()
- self.assertIs(None, cli._conn)
-
- def test__request(self):
- cli = self.getClient()
- res, headers = cli._request('PUT', ['echo'], {}, {})
- self.assertEqual({'CONTENT_TYPE': 'application/json',
- 'PATH_INFO': '/dbase/echo',
- 'QUERY_STRING': '',
- 'body': '{}',
- 'REQUEST_METHOD': 'PUT'}, json.loads(res))
-
- res, headers = cli._request('GET', ['doc', 'echo'], {'a': 1})
- self.assertEqual({'PATH_INFO': '/dbase/doc/echo',
- 'QUERY_STRING': 'a=1',
- 'REQUEST_METHOD': 'GET'}, json.loads(res))
-
- res, headers = cli._request('GET', ['doc', '%FFFF', 'echo'], {'a': 1})
- self.assertEqual({'PATH_INFO': '/dbase/doc/%FFFF/echo',
- 'QUERY_STRING': 'a=1',
- 'REQUEST_METHOD': 'GET'}, json.loads(res))
-
- res, headers = cli._request('POST', ['echo'], {'b': 2}, 'Body',
- 'application/x-test')
- self.assertEqual({'CONTENT_TYPE': 'application/x-test',
- 'PATH_INFO': '/dbase/echo',
- 'QUERY_STRING': 'b=2',
- 'body': 'Body',
- 'REQUEST_METHOD': 'POST'}, json.loads(res))
-
- def test__request_json(self):
- cli = self.getClient()
- res, headers = cli._request_json(
- 'POST', ['echo'], {'b': 2}, {'a': 'x'})
- self.assertEqual('application/json', headers['content-type'])
- self.assertEqual({'CONTENT_TYPE': 'application/json',
- 'PATH_INFO': '/dbase/echo',
- 'QUERY_STRING': 'b=2',
- 'body': '{"a": "x"}',
- 'REQUEST_METHOD': 'POST'}, res)
-
- def test_unspecified_http_error(self):
- cli = self.getClient()
- self.assertRaises(errors.HTTPError,
- cli._request_json, 'POST', ['error'], {},
- {'status': "500 Internal Error",
- 'response': "Crash."})
- try:
- cli._request_json('POST', ['error'], {},
- {'status': "500 Internal Error",
- 'response': "Fail."})
- except errors.HTTPError, e:
- pass
-
- self.assertEqual(500, e.status)
- self.assertEqual("Fail.", e.message)
- self.assertTrue("content-type" in e.headers)
-
- def test_revision_conflict(self):
- cli = self.getClient()
- self.assertRaises(errors.RevisionConflict,
- cli._request_json, 'POST', ['error'], {},
- {'status': "409 Conflict",
- 'response': {"error": "revision conflict"}})
-
- def test_unavailable_proper(self):
- cli = self.getClient()
- cli._delays = (0, 0, 0, 0, 0)
- self.assertRaises(errors.Unavailable,
- cli._request_json, 'POST', ['error'], {},
- {'status': "503 Service Unavailable",
- 'response': {"error": "unavailable"}})
- self.assertEqual(5, self.errors)
-
- def test_unavailable_then_available(self):
- cli = self.getClient()
- cli._delays = (0, 0, 0, 0, 0)
- res, headers = cli._request_json(
- 'POST', ['error_then_accept'], {'b': 2},
- {'status': "503 Service Unavailable",
- 'response': {"error": "unavailable"}})
- self.assertEqual('application/json', headers['content-type'])
- self.assertEqual({'CONTENT_TYPE': 'application/json',
- 'PATH_INFO': '/dbase/error_then_accept',
- 'QUERY_STRING': 'b=2',
- 'body': '{"oki": "doki"}',
- 'REQUEST_METHOD': 'POST'}, res)
- self.assertEqual(3, self.errors)
-
- def test_unavailable_random_source(self):
- cli = self.getClient()
- cli._delays = (0, 0, 0, 0, 0)
- try:
- cli._request_json('POST', ['error'], {},
- {'status': "503 Service Unavailable",
- 'response': "random unavailable."})
- except errors.Unavailable, e:
- pass
-
- self.assertEqual(503, e.status)
- self.assertEqual("random unavailable.", e.message)
- self.assertTrue("content-type" in e.headers)
- self.assertEqual(5, self.errors)
-
- def test_document_too_big(self):
- cli = self.getClient()
- self.assertRaises(errors.DocumentTooBig,
- cli._request_json, 'POST', ['error'], {},
- {'status': "403 Forbidden",
- 'response': {"error": "document too big"}})
-
- def test_user_quota_exceeded(self):
- cli = self.getClient()
- self.assertRaises(errors.UserQuotaExceeded,
- cli._request_json, 'POST', ['error'], {},
- {'status': "403 Forbidden",
- 'response': {"error": "user quota exceeded"}})
-
- def test_user_needs_subscription(self):
- cli = self.getClient()
- self.assertRaises(errors.SubscriptionNeeded,
- cli._request_json, 'POST', ['error'], {},
- {'status': "403 Forbidden",
- 'response': {"error": "user needs subscription"}})
-
- def test_generic_u1db_error(self):
- cli = self.getClient()
- self.assertRaises(errors.U1DBError,
- cli._request_json, 'POST', ['error'], {},
- {'status': "400 Bad Request",
- 'response': {"error": "error"}})
- try:
- cli._request_json('POST', ['error'], {},
- {'status': "400 Bad Request",
- 'response': {"error": "error"}})
- except errors.U1DBError, e:
- pass
- self.assertIs(e.__class__, errors.U1DBError)
-
- def test_unspecified_bad_request(self):
- cli = self.getClient()
- self.assertRaises(errors.HTTPError,
- cli._request_json, 'POST', ['error'], {},
- {'status': "400 Bad Request",
- 'response': "<Bad Request>"})
- try:
- cli._request_json('POST', ['error'], {},
- {'status': "400 Bad Request",
- 'response': "<Bad Request>"})
- except errors.HTTPError, e:
- pass
-
- self.assertEqual(400, e.status)
- self.assertEqual("<Bad Request>", e.message)
- self.assertTrue("content-type" in e.headers)
-
- def test_oauth(self):
- cli = self.getClient()
- cli.set_oauth_credentials(tests.consumer1.key, tests.consumer1.secret,
- tests.token1.key, tests.token1.secret)
- params = {'x': u'\xf0', 'y': "foo"}
- res, headers = cli._request('GET', ['doc', 'oauth'], params)
- self.assertEqual(
- ['/dbase/doc/oauth', tests.token1.key, params], json.loads(res))
-
- # oauth does its own internal quoting
- params = {'x': u'\xf0', 'y': "foo"}
- res, headers = cli._request('GET', ['doc', 'oauth', 'foo bar'], params)
- self.assertEqual(
- ['/dbase/doc/oauth/foo bar', tests.token1.key, params],
- json.loads(res))
-
- def test_oauth_ctr_creds(self):
- cli = self.getClient(creds={'oauth': {
- 'consumer_key': tests.consumer1.key,
- 'consumer_secret': tests.consumer1.secret,
- 'token_key': tests.token1.key,
- 'token_secret': tests.token1.secret,
- }})
- params = {'x': u'\xf0', 'y': "foo"}
- res, headers = cli._request('GET', ['doc', 'oauth'], params)
- self.assertEqual(
- ['/dbase/doc/oauth', tests.token1.key, params], json.loads(res))
-
- def test_unknown_creds(self):
- self.assertRaises(errors.UnknownAuthMethod,
- self.getClient, creds={'foo': {}})
- self.assertRaises(errors.UnknownAuthMethod,
- self.getClient, creds={})
-
- def test_oauth_Unauthorized(self):
- cli = self.getClient()
- cli.set_oauth_credentials(tests.consumer1.key, tests.consumer1.secret,
- tests.token1.key, "WRONG")
- params = {'y': 'foo'}
- self.assertRaises(errors.Unauthorized, cli._request, 'GET',
- ['doc', 'oauth'], params)
diff --git a/src/leap/soledad/u1db/tests/test_http_database.py b/src/leap/soledad/u1db/tests/test_http_database.py
deleted file mode 100644
index c8e7eb76..00000000
--- a/src/leap/soledad/u1db/tests/test_http_database.py
+++ /dev/null
@@ -1,256 +0,0 @@
-# Copyright 2011 Canonical Ltd.
-#
-# This file is part of u1db.
-#
-# u1db is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License version 3
-# as published by the Free Software Foundation.
-#
-# u1db is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with u1db. If not, see <http://www.gnu.org/licenses/>.
-
-"""Tests for HTTPDatabase"""
-
-import inspect
-try:
- import simplejson as json
-except ImportError:
- import json # noqa
-
-from u1db import (
- errors,
- Document,
- tests,
- )
-from u1db.remote import (
- http_database,
- http_target,
- )
-from u1db.tests.test_remote_sync_target import (
- make_http_app,
-)
-
-
-class TestHTTPDatabaseSimpleOperations(tests.TestCase):
-
- def setUp(self):
- super(TestHTTPDatabaseSimpleOperations, self).setUp()
- self.db = http_database.HTTPDatabase('dbase')
- self.db._conn = object() # crash if used
- self.got = None
- self.response_val = None
-
- def _request(method, url_parts, params=None, body=None,
- content_type=None):
- self.got = method, url_parts, params, body, content_type
- if isinstance(self.response_val, Exception):
- raise self.response_val
- return self.response_val
-
- def _request_json(method, url_parts, params=None, body=None,
- content_type=None):
- self.got = method, url_parts, params, body, content_type
- if isinstance(self.response_val, Exception):
- raise self.response_val
- return self.response_val
-
- self.db._request = _request
- self.db._request_json = _request_json
-
- def test__sanity_same_signature(self):
- my_request_sig = inspect.getargspec(self.db._request)
- my_request_sig = (['self'] + my_request_sig[0],) + my_request_sig[1:]
- self.assertEqual(my_request_sig,
- inspect.getargspec(http_database.HTTPDatabase._request))
- my_request_json_sig = inspect.getargspec(self.db._request_json)
- my_request_json_sig = ((['self'] + my_request_json_sig[0],) +
- my_request_json_sig[1:])
- self.assertEqual(my_request_json_sig,
- inspect.getargspec(http_database.HTTPDatabase._request_json))
-
- def test__ensure(self):
- self.response_val = {'ok': True}, {}
- self.db._ensure()
- self.assertEqual(('PUT', [], {}, {}, None), self.got)
-
- def test__delete(self):
- self.response_val = {'ok': True}, {}
- self.db._delete()
- self.assertEqual(('DELETE', [], {}, {}, None), self.got)
-
- def test__check(self):
- self.response_val = {}, {}
- res = self.db._check()
- self.assertEqual({}, res)
- self.assertEqual(('GET', [], None, None, None), self.got)
-
- def test_put_doc(self):
- self.response_val = {'rev': 'doc-rev'}, {}
- doc = Document('doc-id', None, '{"v": 1}')
- res = self.db.put_doc(doc)
- self.assertEqual('doc-rev', res)
- self.assertEqual('doc-rev', doc.rev)
- self.assertEqual(('PUT', ['doc', 'doc-id'], {},
- '{"v": 1}', 'application/json'), self.got)
-
- self.response_val = {'rev': 'doc-rev-2'}, {}
- doc.content = {"v": 2}
- res = self.db.put_doc(doc)
- self.assertEqual('doc-rev-2', res)
- self.assertEqual('doc-rev-2', doc.rev)
- self.assertEqual(('PUT', ['doc', 'doc-id'], {'old_rev': 'doc-rev'},
- '{"v": 2}', 'application/json'), self.got)
-
- def test_get_doc(self):
- self.response_val = '{"v": 2}', {'x-u1db-rev': 'doc-rev',
- 'x-u1db-has-conflicts': 'false'}
- self.assertGetDoc(self.db, 'doc-id', 'doc-rev', '{"v": 2}', False)
- self.assertEqual(
- ('GET', ['doc', 'doc-id'], {'include_deleted': False}, None, None),
- self.got)
-
- def test_get_doc_non_existing(self):
- self.response_val = errors.DocumentDoesNotExist()
- self.assertIs(None, self.db.get_doc('not-there'))
- self.assertEqual(
- ('GET', ['doc', 'not-there'], {'include_deleted': False}, None,
- None), self.got)
-
- def test_get_doc_deleted(self):
- self.response_val = errors.DocumentDoesNotExist()
- self.assertIs(None, self.db.get_doc('deleted'))
- self.assertEqual(
- ('GET', ['doc', 'deleted'], {'include_deleted': False}, None,
- None), self.got)
-
- def test_get_doc_deleted_include_deleted(self):
- self.response_val = errors.HTTPError(404,
- json.dumps(
- {"error": errors.DOCUMENT_DELETED}
- ),
- {'x-u1db-rev': 'doc-rev-gone',
- 'x-u1db-has-conflicts': 'false'})
- doc = self.db.get_doc('deleted', include_deleted=True)
- self.assertEqual('deleted', doc.doc_id)
- self.assertEqual('doc-rev-gone', doc.rev)
- self.assertIs(None, doc.content)
- self.assertEqual(
- ('GET', ['doc', 'deleted'], {'include_deleted': True}, None, None),
- self.got)
-
- def test_get_doc_pass_through_errors(self):
- self.response_val = errors.HTTPError(500, 'Crash.')
- self.assertRaises(errors.HTTPError,
- self.db.get_doc, 'something-something')
-
- def test_create_doc_with_id(self):
- self.response_val = {'rev': 'doc-rev'}, {}
- new_doc = self.db.create_doc_from_json('{"v": 1}', doc_id='doc-id')
- self.assertEqual('doc-rev', new_doc.rev)
- self.assertEqual('doc-id', new_doc.doc_id)
- self.assertEqual('{"v": 1}', new_doc.get_json())
- self.assertEqual(('PUT', ['doc', 'doc-id'], {},
- '{"v": 1}', 'application/json'), self.got)
-
- def test_create_doc_without_id(self):
- self.response_val = {'rev': 'doc-rev-2'}, {}
- new_doc = self.db.create_doc_from_json('{"v": 3}')
- self.assertEqual('D-', new_doc.doc_id[:2])
- self.assertEqual('doc-rev-2', new_doc.rev)
- self.assertEqual('{"v": 3}', new_doc.get_json())
- self.assertEqual(('PUT', ['doc', new_doc.doc_id], {},
- '{"v": 3}', 'application/json'), self.got)
-
- def test_delete_doc(self):
- self.response_val = {'rev': 'doc-rev-gone'}, {}
- doc = Document('doc-id', 'doc-rev', None)
- self.db.delete_doc(doc)
- self.assertEqual('doc-rev-gone', doc.rev)
- self.assertEqual(('DELETE', ['doc', 'doc-id'], {'old_rev': 'doc-rev'},
- None, None), self.got)
-
- def test_get_sync_target(self):
- st = self.db.get_sync_target()
- self.assertIsInstance(st, http_target.HTTPSyncTarget)
- self.assertEqual(st._url, self.db._url)
-
- def test_get_sync_target_inherits_oauth_credentials(self):
- self.db.set_oauth_credentials(tests.consumer1.key,
- tests.consumer1.secret,
- tests.token1.key, tests.token1.secret)
- st = self.db.get_sync_target()
- self.assertEqual(self.db._creds, st._creds)
-
-
-class TestHTTPDatabaseCtrWithCreds(tests.TestCase):
-
- def test_ctr_with_creds(self):
- db1 = http_database.HTTPDatabase('http://dbs/db', creds={'oauth': {
- 'consumer_key': tests.consumer1.key,
- 'consumer_secret': tests.consumer1.secret,
- 'token_key': tests.token1.key,
- 'token_secret': tests.token1.secret
- }})
- self.assertIn('oauth', db1._creds)
-
-
-class TestHTTPDatabaseIntegration(tests.TestCaseWithServer):
-
- make_app_with_state = staticmethod(make_http_app)
-
- def setUp(self):
- super(TestHTTPDatabaseIntegration, self).setUp()
- self.startServer()
-
- def test_non_existing_db(self):
- db = http_database.HTTPDatabase(self.getURL('not-there'))
- self.assertRaises(errors.DatabaseDoesNotExist, db.get_doc, 'doc1')
-
- def test__ensure(self):
- db = http_database.HTTPDatabase(self.getURL('new'))
- db._ensure()
- self.assertIs(None, db.get_doc('doc1'))
-
- def test__delete(self):
- self.request_state._create_database('db0')
- db = http_database.HTTPDatabase(self.getURL('db0'))
- db._delete()
- self.assertRaises(errors.DatabaseDoesNotExist,
- self.request_state.check_database, 'db0')
-
- def test_open_database_existing(self):
- self.request_state._create_database('db0')
- db = http_database.HTTPDatabase.open_database(self.getURL('db0'),
- create=False)
- self.assertIs(None, db.get_doc('doc1'))
-
- def test_open_database_non_existing(self):
- self.assertRaises(errors.DatabaseDoesNotExist,
- http_database.HTTPDatabase.open_database,
- self.getURL('not-there'),
- create=False)
-
- def test_open_database_create(self):
- db = http_database.HTTPDatabase.open_database(self.getURL('new'),
- create=True)
- self.assertIs(None, db.get_doc('doc1'))
-
- def test_delete_database_existing(self):
- self.request_state._create_database('db0')
- http_database.HTTPDatabase.delete_database(self.getURL('db0'))
- self.assertRaises(errors.DatabaseDoesNotExist,
- self.request_state.check_database, 'db0')
-
- def test_doc_ids_needing_quoting(self):
- db0 = self.request_state._create_database('db0')
- db = http_database.HTTPDatabase.open_database(self.getURL('db0'),
- create=False)
- doc = Document('%fff', None, '{}')
- db.put_doc(doc)
- self.assertGetDoc(db0, '%fff', doc.rev, '{}', False)
- self.assertGetDoc(db, '%fff', doc.rev, '{}', False)
diff --git a/src/leap/soledad/u1db/tests/test_https.py b/src/leap/soledad/u1db/tests/test_https.py
deleted file mode 100644
index 67681c8a..00000000
--- a/src/leap/soledad/u1db/tests/test_https.py
+++ /dev/null
@@ -1,117 +0,0 @@
-"""Test support for client-side https support."""
-
-import os
-import ssl
-import sys
-
-from paste import httpserver
-
-from u1db import (
- tests,
- )
-from u1db.remote import (
- http_client,
- http_target,
- )
-
-from u1db.tests.test_remote_sync_target import (
- make_oauth_http_app,
- )
-
-
-def https_server_def():
- def make_server(host_port, application):
- from OpenSSL import SSL
- cert_file = os.path.join(os.path.dirname(__file__), 'testing-certs',
- 'testing.cert')
- key_file = os.path.join(os.path.dirname(__file__), 'testing-certs',
- 'testing.key')
- ssl_context = SSL.Context(SSL.SSLv23_METHOD)
- ssl_context.use_privatekey_file(key_file)
- ssl_context.use_certificate_chain_file(cert_file)
- srv = httpserver.WSGIServerBase(application, host_port,
- httpserver.WSGIHandler,
- ssl_context=ssl_context
- )
-
- def shutdown_request(req):
- req.shutdown()
- srv.close_request(req)
-
- srv.shutdown_request = shutdown_request
- application.base_url = "https://localhost:%s" % srv.server_address[1]
- return srv
- return make_server, "shutdown", "https"
-
-
-def oauth_https_sync_target(test, host, path):
- _, port = test.server.server_address
- st = http_target.HTTPSyncTarget('https://%s:%d/~/%s' % (host, port, path))
- st.set_oauth_credentials(tests.consumer1.key, tests.consumer1.secret,
- tests.token1.key, tests.token1.secret)
- return st
-
-
-class TestHttpSyncTargetHttpsSupport(tests.TestCaseWithServer):
-
- scenarios = [
- ('oauth_https', {'server_def': https_server_def,
- 'make_app_with_state': make_oauth_http_app,
- 'make_document_for_test': tests.make_document_for_test,
- 'sync_target': oauth_https_sync_target
- }),
- ]
-
- def setUp(self):
- try:
- import OpenSSL # noqa
- except ImportError:
- self.skipTest("Requires pyOpenSSL")
- self.cacert_pem = os.path.join(os.path.dirname(__file__),
- 'testing-certs', 'cacert.pem')
- super(TestHttpSyncTargetHttpsSupport, self).setUp()
-
- def getSyncTarget(self, host, path=None):
- if self.server is None:
- self.startServer()
- return self.sync_target(self, host, path)
-
- def test_working(self):
- self.startServer()
- db = self.request_state._create_database('test')
- self.patch(http_client, 'CA_CERTS', self.cacert_pem)
- remote_target = self.getSyncTarget('localhost', 'test')
- remote_target.record_sync_info('other-id', 2, 'T-id')
- self.assertEqual(
- (2, 'T-id'), db._get_replica_gen_and_trans_id('other-id'))
-
- def test_cannot_verify_cert(self):
- if not sys.platform.startswith('linux'):
- self.skipTest(
- "XXX certificate verification happens on linux only for now")
- self.startServer()
- # don't print expected traceback server-side
- self.server.handle_error = lambda req, cli_addr: None
- self.request_state._create_database('test')
- remote_target = self.getSyncTarget('localhost', 'test')
- try:
- remote_target.record_sync_info('other-id', 2, 'T-id')
- except ssl.SSLError, e:
- self.assertIn("certificate verify failed", str(e))
- else:
- self.fail("certificate verification should have failed.")
-
- def test_host_mismatch(self):
- if not sys.platform.startswith('linux'):
- self.skipTest(
- "XXX certificate verification happens on linux only for now")
- self.startServer()
- self.request_state._create_database('test')
- self.patch(http_client, 'CA_CERTS', self.cacert_pem)
- remote_target = self.getSyncTarget('127.0.0.1', 'test')
- self.assertRaises(
- http_client.CertificateError, remote_target.record_sync_info,
- 'other-id', 2, 'T-id')
-
-
-load_tests = tests.load_with_scenarios
diff --git a/src/leap/soledad/u1db/tests/test_inmemory.py b/src/leap/soledad/u1db/tests/test_inmemory.py
deleted file mode 100644
index 255a1e08..00000000
--- a/src/leap/soledad/u1db/tests/test_inmemory.py
+++ /dev/null
@@ -1,128 +0,0 @@
-# Copyright 2011 Canonical Ltd.
-#
-# This file is part of u1db.
-#
-# u1db is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License version 3
-# as published by the Free Software Foundation.
-#
-# u1db is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with u1db. If not, see <http://www.gnu.org/licenses/>.
-
-"""Test in-memory backend internals."""
-
-from u1db import (
- errors,
- tests,
- )
-from u1db.backends import inmemory
-
-
-simple_doc = '{"key": "value"}'
-
-
-class TestInMemoryDatabaseInternals(tests.TestCase):
-
- def setUp(self):
- super(TestInMemoryDatabaseInternals, self).setUp()
- self.db = inmemory.InMemoryDatabase('test')
-
- def test__allocate_doc_rev_from_None(self):
- self.assertEqual('test:1', self.db._allocate_doc_rev(None))
-
- def test__allocate_doc_rev_incremental(self):
- self.assertEqual('test:2', self.db._allocate_doc_rev('test:1'))
-
- def test__allocate_doc_rev_other(self):
- self.assertEqual('replica:1|test:1',
- self.db._allocate_doc_rev('replica:1'))
-
- def test__get_replica_uid(self):
- self.assertEqual('test', self.db._replica_uid)
-
-
-class TestInMemoryIndex(tests.TestCase):
-
- def test_has_name_and_definition(self):
- idx = inmemory.InMemoryIndex('idx-name', ['key'])
- self.assertEqual('idx-name', idx._name)
- self.assertEqual(['key'], idx._definition)
-
- def test_evaluate_json(self):
- idx = inmemory.InMemoryIndex('idx-name', ['key'])
- self.assertEqual(['value'], idx.evaluate_json(simple_doc))
-
- def test_evaluate_json_field_None(self):
- idx = inmemory.InMemoryIndex('idx-name', ['missing'])
- self.assertEqual([], idx.evaluate_json(simple_doc))
-
- def test_evaluate_json_subfield_None(self):
- idx = inmemory.InMemoryIndex('idx-name', ['key', 'missing'])
- self.assertEqual([], idx.evaluate_json(simple_doc))
-
- def test_evaluate_multi_index(self):
- doc = '{"key": "value", "key2": "value2"}'
- idx = inmemory.InMemoryIndex('idx-name', ['key', 'key2'])
- self.assertEqual(['value\x01value2'],
- idx.evaluate_json(doc))
-
- def test_update_ignores_None(self):
- idx = inmemory.InMemoryIndex('idx-name', ['nokey'])
- idx.add_json('doc-id', simple_doc)
- self.assertEqual({}, idx._values)
-
- def test_update_adds_entry(self):
- idx = inmemory.InMemoryIndex('idx-name', ['key'])
- idx.add_json('doc-id', simple_doc)
- self.assertEqual({'value': ['doc-id']}, idx._values)
-
- def test_remove_json(self):
- idx = inmemory.InMemoryIndex('idx-name', ['key'])
- idx.add_json('doc-id', simple_doc)
- self.assertEqual({'value': ['doc-id']}, idx._values)
- idx.remove_json('doc-id', simple_doc)
- self.assertEqual({}, idx._values)
-
- def test_remove_json_multiple(self):
- idx = inmemory.InMemoryIndex('idx-name', ['key'])
- idx.add_json('doc-id', simple_doc)
- idx.add_json('doc2-id', simple_doc)
- self.assertEqual({'value': ['doc-id', 'doc2-id']}, idx._values)
- idx.remove_json('doc-id', simple_doc)
- self.assertEqual({'value': ['doc2-id']}, idx._values)
-
- def test_keys(self):
- idx = inmemory.InMemoryIndex('idx-name', ['key'])
- idx.add_json('doc-id', simple_doc)
- self.assertEqual(['value'], idx.keys())
-
- def test_lookup(self):
- idx = inmemory.InMemoryIndex('idx-name', ['key'])
- idx.add_json('doc-id', simple_doc)
- self.assertEqual(['doc-id'], idx.lookup(['value']))
-
- def test_lookup_multi(self):
- idx = inmemory.InMemoryIndex('idx-name', ['key'])
- idx.add_json('doc-id', simple_doc)
- idx.add_json('doc2-id', simple_doc)
- self.assertEqual(['doc-id', 'doc2-id'], idx.lookup(['value']))
-
- def test__find_non_wildcards(self):
- idx = inmemory.InMemoryIndex('idx-name', ['k1', 'k2', 'k3'])
- self.assertEqual(-1, idx._find_non_wildcards(('a', 'b', 'c')))
- self.assertEqual(2, idx._find_non_wildcards(('a', 'b', '*')))
- self.assertEqual(3, idx._find_non_wildcards(('a', 'b', 'c*')))
- self.assertEqual(2, idx._find_non_wildcards(('a', 'b*', '*')))
- self.assertEqual(0, idx._find_non_wildcards(('*', '*', '*')))
- self.assertEqual(1, idx._find_non_wildcards(('a*', '*', '*')))
- self.assertRaises(errors.InvalidValueForIndex,
- idx._find_non_wildcards, ('a', 'b'))
- self.assertRaises(errors.InvalidValueForIndex,
- idx._find_non_wildcards, ('a', 'b', 'c', 'd'))
- self.assertRaises(errors.InvalidGlobbing,
- idx._find_non_wildcards, ('*', 'b', 'c'))
diff --git a/src/leap/soledad/u1db/tests/test_open.py b/src/leap/soledad/u1db/tests/test_open.py
deleted file mode 100644
index fbeb0cfd..00000000
--- a/src/leap/soledad/u1db/tests/test_open.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# Copyright 2011 Canonical Ltd.
-#
-# This file is part of u1db.
-#
-# u1db is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License version 3
-# as published by the Free Software Foundation.
-#
-# u1db is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with u1db. If not, see <http://www.gnu.org/licenses/>.
-
-"""Test u1db.open"""
-
-import os
-
-from u1db import (
- errors,
- open as u1db_open,
- tests,
- )
-from u1db.backends import sqlite_backend
-from u1db.tests.test_backends import TestAlternativeDocument
-
-
-class TestU1DBOpen(tests.TestCase):
-
- def setUp(self):
- super(TestU1DBOpen, self).setUp()
- tmpdir = self.createTempDir()
- self.db_path = tmpdir + '/test.db'
-
- def test_open_no_create(self):
- self.assertRaises(errors.DatabaseDoesNotExist,
- u1db_open, self.db_path, create=False)
- self.assertFalse(os.path.exists(self.db_path))
-
- def test_open_create(self):
- db = u1db_open(self.db_path, create=True)
- self.addCleanup(db.close)
- self.assertTrue(os.path.exists(self.db_path))
- self.assertIsInstance(db, sqlite_backend.SQLiteDatabase)
-
- def test_open_with_factory(self):
- db = u1db_open(self.db_path, create=True,
- document_factory=TestAlternativeDocument)
- self.addCleanup(db.close)
- self.assertEqual(TestAlternativeDocument, db._factory)
-
- def test_open_existing(self):
- db = sqlite_backend.SQLitePartialExpandDatabase(self.db_path)
- self.addCleanup(db.close)
- doc = db.create_doc_from_json(tests.simple_doc)
- # Even though create=True, we shouldn't wipe the db
- db2 = u1db_open(self.db_path, create=True)
- self.addCleanup(db2.close)
- doc2 = db2.get_doc(doc.doc_id)
- self.assertEqual(doc, doc2)
-
- def test_open_existing_no_create(self):
- db = sqlite_backend.SQLitePartialExpandDatabase(self.db_path)
- self.addCleanup(db.close)
- db2 = u1db_open(self.db_path, create=False)
- self.addCleanup(db2.close)
- self.assertIsInstance(db2, sqlite_backend.SQLitePartialExpandDatabase)
diff --git a/src/leap/soledad/u1db/tests/test_query_parser.py b/src/leap/soledad/u1db/tests/test_query_parser.py
deleted file mode 100644
index ee374267..00000000
--- a/src/leap/soledad/u1db/tests/test_query_parser.py
+++ /dev/null
@@ -1,443 +0,0 @@
-# Copyright 2011 Canonical Ltd.
-#
-# This file is part of u1db.
-#
-# u1db is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License version 3
-# as published by the Free Software Foundation.
-#
-# u1db is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with u1db. If not, see <http://www.gnu.org/licenses/>.
-
-from u1db import (
- errors,
- query_parser,
- tests,
- )
-
-
-trivial_raw_doc = {}
-
-
-class TestFieldName(tests.TestCase):
-
- def test_check_fieldname_valid(self):
- self.assertIsNone(query_parser.check_fieldname("foo"))
-
- def test_check_fieldname_invalid(self):
- self.assertRaises(
- errors.IndexDefinitionParseError, query_parser.check_fieldname,
- "foo.")
-
-
-class TestMakeTree(tests.TestCase):
-
- def setUp(self):
- super(TestMakeTree, self).setUp()
- self.parser = query_parser.Parser()
-
- def assertParseError(self, definition):
- self.assertRaises(
- errors.IndexDefinitionParseError, self.parser.parse,
- definition)
-
- def test_single_field(self):
- self.assertIsInstance(
- self.parser.parse('f'), query_parser.ExtractField)
-
- def test_single_mapping(self):
- self.assertIsInstance(
- self.parser.parse('bool(field1)'), query_parser.Bool)
-
- def test_nested_mapping(self):
- self.assertIsInstance(
- self.parser.parse('lower(split_words(field1))'),
- query_parser.Lower)
-
- def test_nested_branching_mapping(self):
- self.assertIsInstance(
- self.parser.parse(
- 'combine(lower(field1), split_words(field2), '
- 'number(field3, 5))'), query_parser.Combine)
-
- def test_single_mapping_multiple_fields(self):
- self.assertIsInstance(
- self.parser.parse('number(field1, 5)'), query_parser.Number)
-
- def test_unknown_mapping(self):
- self.assertParseError('mapping(whatever)')
-
- def test_parse_missing_close_paren(self):
- self.assertParseError("lower(a")
-
- def test_parse_trailing_chars(self):
- self.assertParseError("lower(ab))")
-
- def test_parse_empty_op(self):
- self.assertParseError("(ab)")
-
- def test_parse_top_level_commas(self):
- self.assertParseError("a, b")
-
- def test_invalid_field_name(self):
- self.assertParseError("a.")
-
- def test_invalid_inner_field_name(self):
- self.assertParseError("lower(a.)")
-
- def test_gobbledigook(self):
- self.assertParseError("(@#@cc @#!*DFJSXV(()jccd")
-
- def test_leading_space(self):
- self.assertIsInstance(
- self.parser.parse(" lower(a)"), query_parser.Lower)
-
- def test_trailing_space(self):
- self.assertIsInstance(
- self.parser.parse("lower(a) "), query_parser.Lower)
-
- def test_spaces_before_open_paren(self):
- self.assertIsInstance(
- self.parser.parse("lower (a)"), query_parser.Lower)
-
- def test_spaces_after_open_paren(self):
- self.assertIsInstance(
- self.parser.parse("lower( a)"), query_parser.Lower)
-
- def test_spaces_before_close_paren(self):
- self.assertIsInstance(
- self.parser.parse("lower(a )"), query_parser.Lower)
-
- def test_spaces_before_comma(self):
- self.assertIsInstance(
- self.parser.parse("number(a , 5)"), query_parser.Number)
-
- def test_spaces_after_comma(self):
- self.assertIsInstance(
- self.parser.parse("number(a, 5)"), query_parser.Number)
-
-
-class TestStaticGetter(tests.TestCase):
-
- def test_returns_string(self):
- getter = query_parser.StaticGetter('foo')
- self.assertEqual(['foo'], getter.get(trivial_raw_doc))
-
- def test_returns_int(self):
- getter = query_parser.StaticGetter(9)
- self.assertEqual([9], getter.get(trivial_raw_doc))
-
- def test_returns_float(self):
- getter = query_parser.StaticGetter(9.2)
- self.assertEqual([9.2], getter.get(trivial_raw_doc))
-
- def test_returns_None(self):
- getter = query_parser.StaticGetter(None)
- self.assertEqual([], getter.get(trivial_raw_doc))
-
- def test_returns_list(self):
- getter = query_parser.StaticGetter(['a', 'b'])
- self.assertEqual(['a', 'b'], getter.get(trivial_raw_doc))
-
-
-class TestExtractField(tests.TestCase):
-
- def assertExtractField(self, expected, field_name, raw_doc):
- getter = query_parser.ExtractField(field_name)
- self.assertEqual(expected, getter.get(raw_doc))
-
- def test_get_value(self):
- self.assertExtractField(['bar'], 'foo', {'foo': 'bar'})
-
- def test_get_value_None(self):
- self.assertExtractField([], 'foo', {'foo': None})
-
- def test_get_value_missing_key(self):
- self.assertExtractField([], 'foo', {})
-
- def test_get_value_subfield(self):
- self.assertExtractField(['bar'], 'foo.baz', {'foo': {'baz': 'bar'}})
-
- def test_get_value_subfield_missing(self):
- self.assertExtractField([], 'foo.baz', {'foo': 'bar'})
-
- def test_get_value_dict(self):
- self.assertExtractField([], 'foo', {'foo': {'baz': 'bar'}})
-
- def test_get_value_list(self):
- self.assertExtractField(['bar', 'zap'], 'foo', {'foo': ['bar', 'zap']})
-
- def test_get_value_mixed_list(self):
- self.assertExtractField(['bar', 'zap'], 'foo',
- {'foo': ['bar', ['baa'], 'zap', {'bing': 9}]})
-
- def test_get_value_list_of_dicts(self):
- self.assertExtractField([], 'foo', {'foo': [{'zap': 'bar'}]})
-
- def test_get_value_list_of_dicts2(self):
- self.assertExtractField(
- ['bar', 'baz'], 'foo.zap',
- {'foo': [{'zap': 'bar'}, {'zap': 'baz'}]})
-
- def test_get_value_int(self):
- self.assertExtractField([9], 'foo', {'foo': 9})
-
- def test_get_value_float(self):
- self.assertExtractField([9.2], 'foo', {'foo': 9.2})
-
- def test_get_value_bool(self):
- self.assertExtractField([True], 'foo', {'foo': True})
- self.assertExtractField([False], 'foo', {'foo': False})
-
-
-class TestLower(tests.TestCase):
-
- def assertLowerGets(self, expected, input_val):
- getter = query_parser.Lower(query_parser.StaticGetter(input_val))
- out_val = getter.get(trivial_raw_doc)
- self.assertEqual(sorted(expected), sorted(out_val))
-
- def test_inner_returns_None(self):
- self.assertLowerGets([], None)
-
- def test_inner_returns_string(self):
- self.assertLowerGets(['foo'], 'fOo')
-
- def test_inner_returns_list(self):
- self.assertLowerGets(['foo', 'bar'], ['fOo', 'bAr'])
-
- def test_inner_returns_int(self):
- self.assertLowerGets([], 9)
-
- def test_inner_returns_float(self):
- self.assertLowerGets([], 9.0)
-
- def test_inner_returns_bool(self):
- self.assertLowerGets([], True)
-
- def test_inner_returns_list_containing_int(self):
- self.assertLowerGets(['foo', 'bar'], ['fOo', 9, 'bAr'])
-
- def test_inner_returns_list_containing_float(self):
- self.assertLowerGets(['foo', 'bar'], ['fOo', 9.2, 'bAr'])
-
- def test_inner_returns_list_containing_bool(self):
- self.assertLowerGets(['foo', 'bar'], ['fOo', True, 'bAr'])
-
- def test_inner_returns_list_containing_list(self):
- # TODO: Should this be unfolding the inner list?
- self.assertLowerGets(['foo', 'bar'], ['fOo', ['bAa'], 'bAr'])
-
- def test_inner_returns_list_containing_dict(self):
- self.assertLowerGets(['foo', 'bar'], ['fOo', {'baa': 'xam'}, 'bAr'])
-
-
-class TestSplitWords(tests.TestCase):
-
- def assertSplitWords(self, expected, value):
- getter = query_parser.SplitWords(query_parser.StaticGetter(value))
- self.assertEqual(sorted(expected), sorted(getter.get(trivial_raw_doc)))
-
- def test_inner_returns_None(self):
- self.assertSplitWords([], None)
-
- def test_inner_returns_string(self):
- self.assertSplitWords(['foo', 'bar'], 'foo bar')
-
- def test_inner_returns_list(self):
- self.assertSplitWords(['foo', 'baz', 'bar', 'sux'],
- ['foo baz', 'bar sux'])
-
- def test_deduplicates(self):
- self.assertSplitWords(['bar'], ['bar', 'bar', 'bar'])
-
- def test_inner_returns_int(self):
- self.assertSplitWords([], 9)
-
- def test_inner_returns_float(self):
- self.assertSplitWords([], 9.2)
-
- def test_inner_returns_bool(self):
- self.assertSplitWords([], True)
-
- def test_inner_returns_list_containing_int(self):
- self.assertSplitWords(['foo', 'baz', 'bar', 'sux'],
- ['foo baz', 9, 'bar sux'])
-
- def test_inner_returns_list_containing_float(self):
- self.assertSplitWords(['foo', 'baz', 'bar', 'sux'],
- ['foo baz', 9.2, 'bar sux'])
-
- def test_inner_returns_list_containing_bool(self):
- self.assertSplitWords(['foo', 'baz', 'bar', 'sux'],
- ['foo baz', True, 'bar sux'])
-
- def test_inner_returns_list_containing_list(self):
- # TODO: Expand sub-lists?
- self.assertSplitWords(['foo', 'baz', 'bar', 'sux'],
- ['foo baz', ['baa'], 'bar sux'])
-
- def test_inner_returns_list_containing_dict(self):
- self.assertSplitWords(['foo', 'baz', 'bar', 'sux'],
- ['foo baz', {'baa': 'xam'}, 'bar sux'])
-
-
-class TestNumber(tests.TestCase):
-
- def assertNumber(self, expected, value, padding=5):
- """Assert number transformation produced expected values."""
- getter = query_parser.Number(query_parser.StaticGetter(value), padding)
- self.assertEqual(expected, getter.get(trivial_raw_doc))
-
- def test_inner_returns_None(self):
- """None is thrown away."""
- self.assertNumber([], None)
-
- def test_inner_returns_int(self):
- """A single integer is converted to zero padded strings."""
- self.assertNumber(['00009'], 9)
-
- def test_inner_returns_list(self):
- """Integers are converted to zero padded strings."""
- self.assertNumber(['00009', '00235'], [9, 235])
-
- def test_inner_returns_string(self):
- """A string is thrown away."""
- self.assertNumber([], 'foo bar')
-
- def test_inner_returns_float(self):
- """A float is thrown away."""
- self.assertNumber([], 9.2)
-
- def test_inner_returns_bool(self):
- """A boolean is thrown away."""
- self.assertNumber([], True)
-
- def test_inner_returns_list_containing_strings(self):
- """Strings in a list are thrown away."""
- self.assertNumber(['00009'], ['foo baz', 9, 'bar sux'])
-
- def test_inner_returns_list_containing_float(self):
- """Floats in a list are thrown away."""
- self.assertNumber(
- ['00083', '00073'], [83, 9.2, 73])
-
- def test_inner_returns_list_containing_bool(self):
- """Booleans in a list are thrown away."""
- self.assertNumber(
- ['00083', '00073'], [83, True, 73])
-
- def test_inner_returns_list_containing_list(self):
- """Lists in a list are thrown away."""
- # TODO: Expand sub-lists?
- self.assertNumber(
- ['00012', '03333'], [12, [29], 3333])
-
- def test_inner_returns_list_containing_dict(self):
- """Dicts in a list are thrown away."""
- self.assertNumber(
- ['00012', '00001'], [12, {54: 89}, 1])
-
-
-class TestIsNull(tests.TestCase):
-
- def assertIsNull(self, value):
- getter = query_parser.IsNull(query_parser.StaticGetter(value))
- self.assertEqual([True], getter.get(trivial_raw_doc))
-
- def assertIsNotNull(self, value):
- getter = query_parser.IsNull(query_parser.StaticGetter(value))
- self.assertEqual([False], getter.get(trivial_raw_doc))
-
- def test_inner_returns_None(self):
- self.assertIsNull(None)
-
- def test_inner_returns_string(self):
- self.assertIsNotNull('foo')
-
- def test_inner_returns_list(self):
- self.assertIsNotNull(['foo', 'bar'])
-
- def test_inner_returns_empty_list(self):
- # TODO: is this the behavior we want?
- self.assertIsNull([])
-
- def test_inner_returns_int(self):
- self.assertIsNotNull(9)
-
- def test_inner_returns_float(self):
- self.assertIsNotNull(9.2)
-
- def test_inner_returns_bool(self):
- self.assertIsNotNull(True)
-
- # TODO: What about a dict? Inner is likely to return None, even though the
- # attribute does exist...
-
-
-class TestParser(tests.TestCase):
-
- def parse(self, spec):
- parser = query_parser.Parser()
- return parser.parse(spec)
-
- def parse_all(self, specs):
- parser = query_parser.Parser()
- return parser.parse_all(specs)
-
- def assertParseError(self, definition):
- self.assertRaises(errors.IndexDefinitionParseError, self.parse,
- definition)
-
- def test_parse_empty_string(self):
- self.assertRaises(errors.IndexDefinitionParseError, self.parse, "")
-
- def test_parse_field(self):
- getter = self.parse("a")
- self.assertIsInstance(getter, query_parser.ExtractField)
- self.assertEqual(["a"], getter.field)
-
- def test_parse_dotted_field(self):
- getter = self.parse("a.b")
- self.assertIsInstance(getter, query_parser.ExtractField)
- self.assertEqual(["a", "b"], getter.field)
-
- def test_parse_dotted_field_nothing_after_dot(self):
- self.assertParseError("a.")
-
- def test_parse_missing_close_on_transformation(self):
- self.assertParseError("lower(a")
-
- def test_parse_missing_field_in_transformation(self):
- self.assertParseError("lower()")
-
- def test_parse_trailing_chars(self):
- self.assertParseError("lower(ab))")
-
- def test_parse_empty_op(self):
- self.assertParseError("(ab)")
-
- def test_parse_unknown_op(self):
- self.assertParseError("no_such_operation(field)")
-
- def test_parse_wrong_arg_type(self):
- self.assertParseError("number(field, fnord)")
-
- def test_parse_transformation(self):
- getter = self.parse("lower(a)")
- self.assertIsInstance(getter, query_parser.Lower)
- self.assertIsInstance(getter.inner, query_parser.ExtractField)
- self.assertEqual(["a"], getter.inner.field)
-
- def test_parse_all(self):
- getters = self.parse_all(["a", "b"])
- self.assertEqual(2, len(getters))
- self.assertIsInstance(getters[0], query_parser.ExtractField)
- self.assertEqual(["a"], getters[0].field)
- self.assertIsInstance(getters[1], query_parser.ExtractField)
- self.assertEqual(["b"], getters[1].field)
diff --git a/src/leap/soledad/u1db/tests/test_remote_sync_target.py b/src/leap/soledad/u1db/tests/test_remote_sync_target.py
deleted file mode 100644
index 3e0d8995..00000000
--- a/src/leap/soledad/u1db/tests/test_remote_sync_target.py
+++ /dev/null
@@ -1,314 +0,0 @@
-# Copyright 2011-2012 Canonical Ltd.
-#
-# This file is part of u1db.
-#
-# u1db is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License version 3
-# as published by the Free Software Foundation.
-#
-# u1db is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with u1db. If not, see <http://www.gnu.org/licenses/>.
-
-"""Tests for the remote sync targets"""
-
-import cStringIO
-
-from u1db import (
- errors,
- tests,
- )
-from u1db.remote import (
- http_app,
- http_target,
- oauth_middleware,
- )
-
-
-class TestHTTPSyncTargetBasics(tests.TestCase):
-
- def test_parse_url(self):
- remote_target = http_target.HTTPSyncTarget('http://127.0.0.1:12345/')
- self.assertEqual('http', remote_target._url.scheme)
- self.assertEqual('127.0.0.1', remote_target._url.hostname)
- self.assertEqual(12345, remote_target._url.port)
- self.assertEqual('/', remote_target._url.path)
-
-
-class TestParsingSyncStream(tests.TestCase):
-
- def test_wrong_start(self):
- tgt = http_target.HTTPSyncTarget("http://foo/foo")
-
- self.assertRaises(errors.BrokenSyncStream,
- tgt._parse_sync_stream, "{}\r\n]", None)
-
- self.assertRaises(errors.BrokenSyncStream,
- tgt._parse_sync_stream, "\r\n{}\r\n]", None)
-
- self.assertRaises(errors.BrokenSyncStream,
- tgt._parse_sync_stream, "", None)
-
- def test_wrong_end(self):
- tgt = http_target.HTTPSyncTarget("http://foo/foo")
-
- self.assertRaises(errors.BrokenSyncStream,
- tgt._parse_sync_stream, "[\r\n{}", None)
-
- self.assertRaises(errors.BrokenSyncStream,
- tgt._parse_sync_stream, "[\r\n", None)
-
- def test_missing_comma(self):
- tgt = http_target.HTTPSyncTarget("http://foo/foo")
-
- self.assertRaises(errors.BrokenSyncStream,
- tgt._parse_sync_stream,
- '[\r\n{}\r\n{"id": "i", "rev": "r", '
- '"content": "c", "gen": 3}\r\n]', None)
-
- def test_no_entries(self):
- tgt = http_target.HTTPSyncTarget("http://foo/foo")
-
- self.assertRaises(errors.BrokenSyncStream,
- tgt._parse_sync_stream, "[\r\n]", None)
-
- def test_extra_comma(self):
- tgt = http_target.HTTPSyncTarget("http://foo/foo")
-
- self.assertRaises(errors.BrokenSyncStream,
- tgt._parse_sync_stream, "[\r\n{},\r\n]", None)
-
- self.assertRaises(errors.BrokenSyncStream,
- tgt._parse_sync_stream,
- '[\r\n{},\r\n{"id": "i", "rev": "r", '
- '"content": "{}", "gen": 3, "trans_id": "T-sid"}'
- ',\r\n]',
- lambda doc, gen, trans_id: None)
-
- def test_error_in_stream(self):
- tgt = http_target.HTTPSyncTarget("http://foo/foo")
-
- self.assertRaises(errors.Unavailable,
- tgt._parse_sync_stream,
- '[\r\n{"new_generation": 0},'
- '\r\n{"error": "unavailable"}\r\n', None)
-
- self.assertRaises(errors.Unavailable,
- tgt._parse_sync_stream,
- '[\r\n{"error": "unavailable"}\r\n', None)
-
- self.assertRaises(errors.BrokenSyncStream,
- tgt._parse_sync_stream,
- '[\r\n{"error": "?"}\r\n', None)
-
-
-def make_http_app(state):
- return http_app.HTTPApp(state)
-
-
-def http_sync_target(test, path):
- return http_target.HTTPSyncTarget(test.getURL(path))
-
-
-def make_oauth_http_app(state):
- app = http_app.HTTPApp(state)
- application = oauth_middleware.OAuthMiddleware(app, None, prefix='/~/')
- application.get_oauth_data_store = lambda: tests.testingOAuthStore
- return application
-
-
-def oauth_http_sync_target(test, path):
- st = http_sync_target(test, '~/' + path)
- st.set_oauth_credentials(tests.consumer1.key, tests.consumer1.secret,
- tests.token1.key, tests.token1.secret)
- return st
-
-
-class TestRemoteSyncTargets(tests.TestCaseWithServer):
-
- scenarios = [
- ('http', {'make_app_with_state': make_http_app,
- 'make_document_for_test': tests.make_document_for_test,
- 'sync_target': http_sync_target}),
- ('oauth_http', {'make_app_with_state': make_oauth_http_app,
- 'make_document_for_test': tests.make_document_for_test,
- 'sync_target': oauth_http_sync_target}),
- ]
-
- def getSyncTarget(self, path=None):
- if self.server is None:
- self.startServer()
- return self.sync_target(self, path)
-
- def test_get_sync_info(self):
- self.startServer()
- db = self.request_state._create_database('test')
- db._set_replica_gen_and_trans_id('other-id', 1, 'T-transid')
- remote_target = self.getSyncTarget('test')
- self.assertEqual(('test', 0, '', 1, 'T-transid'),
- remote_target.get_sync_info('other-id'))
-
- def test_record_sync_info(self):
- self.startServer()
- db = self.request_state._create_database('test')
- remote_target = self.getSyncTarget('test')
- remote_target.record_sync_info('other-id', 2, 'T-transid')
- self.assertEqual(
- (2, 'T-transid'), db._get_replica_gen_and_trans_id('other-id'))
-
- def test_sync_exchange_send(self):
- self.startServer()
- db = self.request_state._create_database('test')
- remote_target = self.getSyncTarget('test')
- other_docs = []
-
- def receive_doc(doc):
- other_docs.append((doc.doc_id, doc.rev, doc.get_json()))
-
- doc = self.make_document('doc-here', 'replica:1', '{"value": "here"}')
- new_gen, trans_id = remote_target.sync_exchange(
- [(doc, 10, 'T-sid')], 'replica', last_known_generation=0,
- last_known_trans_id=None, return_doc_cb=receive_doc)
- self.assertEqual(1, new_gen)
- self.assertGetDoc(
- db, 'doc-here', 'replica:1', '{"value": "here"}', False)
-
- def test_sync_exchange_send_failure_and_retry_scenario(self):
- self.startServer()
-
- def blackhole_getstderr(inst):
- return cStringIO.StringIO()
-
- self.patch(self.server.RequestHandlerClass, 'get_stderr',
- blackhole_getstderr)
- db = self.request_state._create_database('test')
- _put_doc_if_newer = db._put_doc_if_newer
- trigger_ids = ['doc-here2']
-
- def bomb_put_doc_if_newer(doc, save_conflict,
- replica_uid=None, replica_gen=None,
- replica_trans_id=None):
- if doc.doc_id in trigger_ids:
- raise Exception
- return _put_doc_if_newer(doc, save_conflict=save_conflict,
- replica_uid=replica_uid, replica_gen=replica_gen,
- replica_trans_id=replica_trans_id)
- self.patch(db, '_put_doc_if_newer', bomb_put_doc_if_newer)
- remote_target = self.getSyncTarget('test')
- other_changes = []
-
- def receive_doc(doc, gen, trans_id):
- other_changes.append(
- (doc.doc_id, doc.rev, doc.get_json(), gen, trans_id))
-
- doc1 = self.make_document('doc-here', 'replica:1', '{"value": "here"}')
- doc2 = self.make_document('doc-here2', 'replica:1',
- '{"value": "here2"}')
- self.assertRaises(
- errors.HTTPError,
- remote_target.sync_exchange,
- [(doc1, 10, 'T-sid'), (doc2, 11, 'T-sud')],
- 'replica', last_known_generation=0, last_known_trans_id=None,
- return_doc_cb=receive_doc)
- self.assertGetDoc(db, 'doc-here', 'replica:1', '{"value": "here"}',
- False)
- self.assertEqual(
- (10, 'T-sid'), db._get_replica_gen_and_trans_id('replica'))
- self.assertEqual([], other_changes)
- # retry
- trigger_ids = []
- new_gen, trans_id = remote_target.sync_exchange(
- [(doc2, 11, 'T-sud')], 'replica', last_known_generation=0,
- last_known_trans_id=None, return_doc_cb=receive_doc)
- self.assertGetDoc(db, 'doc-here2', 'replica:1', '{"value": "here2"}',
- False)
- self.assertEqual(
- (11, 'T-sud'), db._get_replica_gen_and_trans_id('replica'))
- self.assertEqual(2, new_gen)
- # bounced back to us
- self.assertEqual(
- ('doc-here', 'replica:1', '{"value": "here"}', 1),
- other_changes[0][:-1])
-
- def test_sync_exchange_in_stream_error(self):
- self.startServer()
-
- def blackhole_getstderr(inst):
- return cStringIO.StringIO()
-
- self.patch(self.server.RequestHandlerClass, 'get_stderr',
- blackhole_getstderr)
- db = self.request_state._create_database('test')
- doc = db.create_doc_from_json('{"value": "there"}')
-
- def bomb_get_docs(doc_ids, check_for_conflicts=None,
- include_deleted=False):
- yield doc
- # delayed failure case
- raise errors.Unavailable
-
- self.patch(db, 'get_docs', bomb_get_docs)
- remote_target = self.getSyncTarget('test')
- other_changes = []
-
- def receive_doc(doc, gen, trans_id):
- other_changes.append(
- (doc.doc_id, doc.rev, doc.get_json(), gen, trans_id))
-
- self.assertRaises(
- errors.Unavailable, remote_target.sync_exchange, [], 'replica',
- last_known_generation=0, last_known_trans_id=None,
- return_doc_cb=receive_doc)
- self.assertEqual(
- (doc.doc_id, doc.rev, '{"value": "there"}', 1),
- other_changes[0][:-1])
-
- def test_sync_exchange_receive(self):
- self.startServer()
- db = self.request_state._create_database('test')
- doc = db.create_doc_from_json('{"value": "there"}')
- remote_target = self.getSyncTarget('test')
- other_changes = []
-
- def receive_doc(doc, gen, trans_id):
- other_changes.append(
- (doc.doc_id, doc.rev, doc.get_json(), gen, trans_id))
-
- new_gen, trans_id = remote_target.sync_exchange(
- [], 'replica', last_known_generation=0, last_known_trans_id=None,
- return_doc_cb=receive_doc)
- self.assertEqual(1, new_gen)
- self.assertEqual(
- (doc.doc_id, doc.rev, '{"value": "there"}', 1),
- other_changes[0][:-1])
-
- def test_sync_exchange_send_ensure_callback(self):
- self.startServer()
- remote_target = self.getSyncTarget('test')
- other_docs = []
- replica_uid_box = []
-
- def receive_doc(doc):
- other_docs.append((doc.doc_id, doc.rev, doc.get_json()))
-
- def ensure_cb(replica_uid):
- replica_uid_box.append(replica_uid)
-
- doc = self.make_document('doc-here', 'replica:1', '{"value": "here"}')
- new_gen, trans_id = remote_target.sync_exchange(
- [(doc, 10, 'T-sid')], 'replica', last_known_generation=0,
- last_known_trans_id=None, return_doc_cb=receive_doc,
- ensure_callback=ensure_cb)
- self.assertEqual(1, new_gen)
- db = self.request_state.open_database('test')
- self.assertEqual(1, len(replica_uid_box))
- self.assertEqual(db._replica_uid, replica_uid_box[0])
- self.assertGetDoc(
- db, 'doc-here', 'replica:1', '{"value": "here"}', False)
-
-
-load_tests = tests.load_with_scenarios
diff --git a/src/leap/soledad/u1db/tests/test_remote_utils.py b/src/leap/soledad/u1db/tests/test_remote_utils.py
deleted file mode 100644
index 959cd882..00000000
--- a/src/leap/soledad/u1db/tests/test_remote_utils.py
+++ /dev/null
@@ -1,36 +0,0 @@
-# Copyright 2012 Canonical Ltd.
-#
-# This file is part of u1db.
-#
-# u1db is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License version 3
-# as published by the Free Software Foundation.
-#
-# u1db is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with u1db. If not, see <http://www.gnu.org/licenses/>.
-
-"""Tests for protocol details utils."""
-
-from u1db.tests import TestCase
-from u1db.remote import utils
-
-
-class TestUtils(TestCase):
-
- def test_check_and_strip_comma(self):
- line, comma = utils.check_and_strip_comma("abc,")
- self.assertTrue(comma)
- self.assertEqual("abc", line)
-
- line, comma = utils.check_and_strip_comma("abc")
- self.assertFalse(comma)
- self.assertEqual("abc", line)
-
- line, comma = utils.check_and_strip_comma("")
- self.assertFalse(comma)
- self.assertEqual("", line)
diff --git a/src/leap/soledad/u1db/tests/test_server_state.py b/src/leap/soledad/u1db/tests/test_server_state.py
deleted file mode 100644
index fc3f1282..00000000
--- a/src/leap/soledad/u1db/tests/test_server_state.py
+++ /dev/null
@@ -1,93 +0,0 @@
-# Copyright 2011 Canonical Ltd.
-#
-# This file is part of u1db.
-#
-# u1db is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License version 3
-# as published by the Free Software Foundation.
-#
-# u1db is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with u1db. If not, see <http://www.gnu.org/licenses/>.
-
-"""Tests for server state object."""
-
-import os
-
-from u1db import (
- errors,
- tests,
- )
-from u1db.remote import (
- server_state,
- )
-from u1db.backends import sqlite_backend
-
-
-class TestServerState(tests.TestCase):
-
- def setUp(self):
- super(TestServerState, self).setUp()
- self.state = server_state.ServerState()
-
- def test_set_workingdir(self):
- tempdir = self.createTempDir()
- self.state.set_workingdir(tempdir)
- self.assertTrue(self.state._relpath('path').startswith(tempdir))
-
- def test_open_database(self):
- tempdir = self.createTempDir()
- self.state.set_workingdir(tempdir)
- path = tempdir + '/test.db'
- self.assertFalse(os.path.exists(path))
- # Create the db, but don't do anything with it
- sqlite_backend.SQLitePartialExpandDatabase(path)
- db = self.state.open_database('test.db')
- self.assertIsInstance(db, sqlite_backend.SQLitePartialExpandDatabase)
-
- def test_check_database(self):
- tempdir = self.createTempDir()
- self.state.set_workingdir(tempdir)
- path = tempdir + '/test.db'
- self.assertFalse(os.path.exists(path))
-
- # doesn't exist => raises
- self.assertRaises(errors.DatabaseDoesNotExist,
- self.state.check_database, 'test.db')
-
- # Create the db, but don't do anything with it
- sqlite_backend.SQLitePartialExpandDatabase(path)
- # exists => returns
- res = self.state.check_database('test.db')
- self.assertIsNone(res)
-
- def test_ensure_database(self):
- tempdir = self.createTempDir()
- self.state.set_workingdir(tempdir)
- path = tempdir + '/test.db'
- self.assertFalse(os.path.exists(path))
- db, replica_uid = self.state.ensure_database('test.db')
- self.assertIsInstance(db, sqlite_backend.SQLitePartialExpandDatabase)
- self.assertEqual(db._replica_uid, replica_uid)
- self.assertTrue(os.path.exists(path))
- db2 = self.state.open_database('test.db')
- self.assertIsInstance(db2, sqlite_backend.SQLitePartialExpandDatabase)
-
- def test_delete_database(self):
- tempdir = self.createTempDir()
- self.state.set_workingdir(tempdir)
- path = tempdir + '/test.db'
- db, _ = self.state.ensure_database('test.db')
- db.close()
- self.state.delete_database('test.db')
- self.assertFalse(os.path.exists(path))
-
- def test_delete_database_DoesNotExist(self):
- tempdir = self.createTempDir()
- self.state.set_workingdir(tempdir)
- self.assertRaises(errors.DatabaseDoesNotExist,
- self.state.delete_database, 'test.db')
diff --git a/src/leap/soledad/u1db/tests/test_sqlite_backend.py b/src/leap/soledad/u1db/tests/test_sqlite_backend.py
deleted file mode 100644
index 73330789..00000000
--- a/src/leap/soledad/u1db/tests/test_sqlite_backend.py
+++ /dev/null
@@ -1,493 +0,0 @@
-# Copyright 2011 Canonical Ltd.
-#
-# This file is part of u1db.
-#
-# u1db is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License version 3
-# as published by the Free Software Foundation.
-#
-# u1db is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with u1db. If not, see <http://www.gnu.org/licenses/>.
-
-"""Test sqlite backend internals."""
-
-import os
-import time
-import threading
-
-from sqlite3 import dbapi2
-
-from u1db import (
- errors,
- tests,
- query_parser,
- )
-from u1db.backends import sqlite_backend
-from u1db.tests.test_backends import TestAlternativeDocument
-
-
-simple_doc = '{"key": "value"}'
-nested_doc = '{"key": "value", "sub": {"doc": "underneath"}}'
-
-
-class TestSQLiteDatabase(tests.TestCase):
-
- def test_atomic_initialize(self):
- tmpdir = self.createTempDir()
- dbname = os.path.join(tmpdir, 'atomic.db')
-
- t2 = None # will be a thread
-
- class SQLiteDatabaseTesting(sqlite_backend.SQLiteDatabase):
- _index_storage_value = "testing"
-
- def __init__(self, dbname, ntry):
- self._try = ntry
- self._is_initialized_invocations = 0
- super(SQLiteDatabaseTesting, self).__init__(dbname)
-
- def _is_initialized(self, c):
- res = super(SQLiteDatabaseTesting, self)._is_initialized(c)
- if self._try == 1:
- self._is_initialized_invocations += 1
- if self._is_initialized_invocations == 2:
- t2.start()
- # hard to do better and have a generic test
- time.sleep(0.05)
- return res
-
- outcome2 = []
-
- def second_try():
- try:
- db2 = SQLiteDatabaseTesting(dbname, 2)
- except Exception, e:
- outcome2.append(e)
- else:
- outcome2.append(db2)
-
- t2 = threading.Thread(target=second_try)
- db1 = SQLiteDatabaseTesting(dbname, 1)
- t2.join()
-
- self.assertIsInstance(outcome2[0], SQLiteDatabaseTesting)
- db2 = outcome2[0]
- self.assertTrue(db2._is_initialized(db1._get_sqlite_handle().cursor()))
-
-
-class TestSQLitePartialExpandDatabase(tests.TestCase):
-
- def setUp(self):
- super(TestSQLitePartialExpandDatabase, self).setUp()
- self.db = sqlite_backend.SQLitePartialExpandDatabase(':memory:')
- self.db._set_replica_uid('test')
-
- def test_create_database(self):
- raw_db = self.db._get_sqlite_handle()
- self.assertNotEqual(None, raw_db)
-
- def test_default_replica_uid(self):
- self.db = sqlite_backend.SQLitePartialExpandDatabase(':memory:')
- self.assertIsNot(None, self.db._replica_uid)
- self.assertEqual(32, len(self.db._replica_uid))
- int(self.db._replica_uid, 16)
-
- def test__close_sqlite_handle(self):
- raw_db = self.db._get_sqlite_handle()
- self.db._close_sqlite_handle()
- self.assertRaises(dbapi2.ProgrammingError,
- raw_db.cursor)
-
- def test_create_database_initializes_schema(self):
- raw_db = self.db._get_sqlite_handle()
- c = raw_db.cursor()
- c.execute("SELECT * FROM u1db_config")
- config = dict([(r[0], r[1]) for r in c.fetchall()])
- self.assertEqual({'sql_schema': '0', 'replica_uid': 'test',
- 'index_storage': 'expand referenced'}, config)
-
- # These tables must exist, though we don't care what is in them yet
- c.execute("SELECT * FROM transaction_log")
- c.execute("SELECT * FROM document")
- c.execute("SELECT * FROM document_fields")
- c.execute("SELECT * FROM sync_log")
- c.execute("SELECT * FROM conflicts")
- c.execute("SELECT * FROM index_definitions")
-
- def test__parse_index(self):
- self.db = sqlite_backend.SQLitePartialExpandDatabase(':memory:')
- g = self.db._parse_index_definition('fieldname')
- self.assertIsInstance(g, query_parser.ExtractField)
- self.assertEqual(['fieldname'], g.field)
-
- def test__update_indexes(self):
- self.db = sqlite_backend.SQLitePartialExpandDatabase(':memory:')
- g = self.db._parse_index_definition('fieldname')
- c = self.db._get_sqlite_handle().cursor()
- self.db._update_indexes('doc-id', {'fieldname': 'val'},
- [('fieldname', g)], c)
- c.execute('SELECT doc_id, field_name, value FROM document_fields')
- self.assertEqual([('doc-id', 'fieldname', 'val')],
- c.fetchall())
-
- def test__set_replica_uid(self):
- # Start from scratch, so that replica_uid isn't set.
- self.db = sqlite_backend.SQLitePartialExpandDatabase(':memory:')
- self.assertIsNot(None, self.db._real_replica_uid)
- self.assertIsNot(None, self.db._replica_uid)
- self.db._set_replica_uid('foo')
- c = self.db._get_sqlite_handle().cursor()
- c.execute("SELECT value FROM u1db_config WHERE name='replica_uid'")
- self.assertEqual(('foo',), c.fetchone())
- self.assertEqual('foo', self.db._real_replica_uid)
- self.assertEqual('foo', self.db._replica_uid)
- self.db._close_sqlite_handle()
- self.assertEqual('foo', self.db._replica_uid)
-
- def test__get_generation(self):
- self.assertEqual(0, self.db._get_generation())
-
- def test__get_generation_info(self):
- self.assertEqual((0, ''), self.db._get_generation_info())
-
- def test_create_index(self):
- self.db.create_index('test-idx', "key")
- self.assertEqual([('test-idx', ["key"])], self.db.list_indexes())
-
- def test_create_index_multiple_fields(self):
- self.db.create_index('test-idx', "key", "key2")
- self.assertEqual([('test-idx', ["key", "key2"])],
- self.db.list_indexes())
-
- def test__get_index_definition(self):
- self.db.create_index('test-idx', "key", "key2")
- # TODO: How would you test that an index is getting used for an SQL
- # request?
- self.assertEqual(["key", "key2"],
- self.db._get_index_definition('test-idx'))
-
- def test_list_index_mixed(self):
- # Make sure that we properly order the output
- c = self.db._get_sqlite_handle().cursor()
- # We intentionally insert the data in weird ordering, to make sure the
- # query still gets it back correctly.
- c.executemany("INSERT INTO index_definitions VALUES (?, ?, ?)",
- [('idx-1', 0, 'key10'),
- ('idx-2', 2, 'key22'),
- ('idx-1', 1, 'key11'),
- ('idx-2', 0, 'key20'),
- ('idx-2', 1, 'key21')])
- self.assertEqual([('idx-1', ['key10', 'key11']),
- ('idx-2', ['key20', 'key21', 'key22'])],
- self.db.list_indexes())
-
- def test_no_indexes_no_document_fields(self):
- self.db.create_doc_from_json(
- '{"key1": "val1", "key2": "val2"}')
- c = self.db._get_sqlite_handle().cursor()
- c.execute("SELECT doc_id, field_name, value FROM document_fields"
- " ORDER BY doc_id, field_name, value")
- self.assertEqual([], c.fetchall())
-
- def test_create_extracts_fields(self):
- doc1 = self.db.create_doc_from_json('{"key1": "val1", "key2": "val2"}')
- doc2 = self.db.create_doc_from_json('{"key1": "valx", "key2": "valy"}')
- c = self.db._get_sqlite_handle().cursor()
- c.execute("SELECT doc_id, field_name, value FROM document_fields"
- " ORDER BY doc_id, field_name, value")
- self.assertEqual([], c.fetchall())
- self.db.create_index('test', 'key1', 'key2')
- c.execute("SELECT doc_id, field_name, value FROM document_fields"
- " ORDER BY doc_id, field_name, value")
- self.assertEqual(sorted(
- [(doc1.doc_id, "key1", "val1"),
- (doc1.doc_id, "key2", "val2"),
- (doc2.doc_id, "key1", "valx"),
- (doc2.doc_id, "key2", "valy"),
- ]), sorted(c.fetchall()))
-
- def test_put_updates_fields(self):
- self.db.create_index('test', 'key1', 'key2')
- doc1 = self.db.create_doc_from_json(
- '{"key1": "val1", "key2": "val2"}')
- doc1.content = {"key1": "val1", "key2": "valy"}
- self.db.put_doc(doc1)
- c = self.db._get_sqlite_handle().cursor()
- c.execute("SELECT doc_id, field_name, value FROM document_fields"
- " ORDER BY doc_id, field_name, value")
- self.assertEqual([(doc1.doc_id, "key1", "val1"),
- (doc1.doc_id, "key2", "valy"),
- ], c.fetchall())
-
- def test_put_updates_nested_fields(self):
- self.db.create_index('test', 'key', 'sub.doc')
- doc1 = self.db.create_doc_from_json(nested_doc)
- c = self.db._get_sqlite_handle().cursor()
- c.execute("SELECT doc_id, field_name, value FROM document_fields"
- " ORDER BY doc_id, field_name, value")
- self.assertEqual([(doc1.doc_id, "key", "value"),
- (doc1.doc_id, "sub.doc", "underneath"),
- ], c.fetchall())
-
- def test__ensure_schema_rollback(self):
- temp_dir = self.createTempDir(prefix='u1db-test-')
- path = temp_dir + '/rollback.db'
-
- class SQLitePartialExpandDbTesting(
- sqlite_backend.SQLitePartialExpandDatabase):
-
- def _set_replica_uid_in_transaction(self, uid):
- super(SQLitePartialExpandDbTesting,
- self)._set_replica_uid_in_transaction(uid)
- if fail:
- raise Exception()
-
- db = SQLitePartialExpandDbTesting.__new__(SQLitePartialExpandDbTesting)
- db._db_handle = dbapi2.connect(path) # db is there but not yet init-ed
- fail = True
- self.assertRaises(Exception, db._ensure_schema)
- fail = False
- db._initialize(db._db_handle.cursor())
-
- def test__open_database(self):
- temp_dir = self.createTempDir(prefix='u1db-test-')
- path = temp_dir + '/test.sqlite'
- sqlite_backend.SQLitePartialExpandDatabase(path)
- db2 = sqlite_backend.SQLiteDatabase._open_database(path)
- self.assertIsInstance(db2, sqlite_backend.SQLitePartialExpandDatabase)
-
- def test__open_database_with_factory(self):
- temp_dir = self.createTempDir(prefix='u1db-test-')
- path = temp_dir + '/test.sqlite'
- sqlite_backend.SQLitePartialExpandDatabase(path)
- db2 = sqlite_backend.SQLiteDatabase._open_database(
- path, document_factory=TestAlternativeDocument)
- self.assertEqual(TestAlternativeDocument, db2._factory)
-
- def test__open_database_non_existent(self):
- temp_dir = self.createTempDir(prefix='u1db-test-')
- path = temp_dir + '/non-existent.sqlite'
- self.assertRaises(errors.DatabaseDoesNotExist,
- sqlite_backend.SQLiteDatabase._open_database, path)
-
- def test__open_database_during_init(self):
- temp_dir = self.createTempDir(prefix='u1db-test-')
- path = temp_dir + '/initialised.db'
- db = sqlite_backend.SQLitePartialExpandDatabase.__new__(
- sqlite_backend.SQLitePartialExpandDatabase)
- db._db_handle = dbapi2.connect(path) # db is there but not yet init-ed
- self.addCleanup(db.close)
- observed = []
-
- class SQLiteDatabaseTesting(sqlite_backend.SQLiteDatabase):
- WAIT_FOR_PARALLEL_INIT_HALF_INTERVAL = 0.1
-
- @classmethod
- def _which_index_storage(cls, c):
- res = super(SQLiteDatabaseTesting, cls)._which_index_storage(c)
- db._ensure_schema() # init db
- observed.append(res[0])
- return res
-
- db2 = SQLiteDatabaseTesting._open_database(path)
- self.addCleanup(db2.close)
- self.assertIsInstance(db2, sqlite_backend.SQLitePartialExpandDatabase)
- self.assertEqual([None,
- sqlite_backend.SQLitePartialExpandDatabase._index_storage_value],
- observed)
-
- def test__open_database_invalid(self):
- class SQLiteDatabaseTesting(sqlite_backend.SQLiteDatabase):
- WAIT_FOR_PARALLEL_INIT_HALF_INTERVAL = 0.1
- temp_dir = self.createTempDir(prefix='u1db-test-')
- path1 = temp_dir + '/invalid1.db'
- with open(path1, 'wb') as f:
- f.write("")
- self.assertRaises(dbapi2.OperationalError,
- SQLiteDatabaseTesting._open_database, path1)
- with open(path1, 'wb') as f:
- f.write("invalid")
- self.assertRaises(dbapi2.DatabaseError,
- SQLiteDatabaseTesting._open_database, path1)
-
- def test_open_database_existing(self):
- temp_dir = self.createTempDir(prefix='u1db-test-')
- path = temp_dir + '/existing.sqlite'
- sqlite_backend.SQLitePartialExpandDatabase(path)
- db2 = sqlite_backend.SQLiteDatabase.open_database(path, create=False)
- self.assertIsInstance(db2, sqlite_backend.SQLitePartialExpandDatabase)
-
- def test_open_database_with_factory(self):
- temp_dir = self.createTempDir(prefix='u1db-test-')
- path = temp_dir + '/existing.sqlite'
- sqlite_backend.SQLitePartialExpandDatabase(path)
- db2 = sqlite_backend.SQLiteDatabase.open_database(
- path, create=False, document_factory=TestAlternativeDocument)
- self.assertEqual(TestAlternativeDocument, db2._factory)
-
- def test_open_database_create(self):
- temp_dir = self.createTempDir(prefix='u1db-test-')
- path = temp_dir + '/new.sqlite'
- sqlite_backend.SQLiteDatabase.open_database(path, create=True)
- db2 = sqlite_backend.SQLiteDatabase.open_database(path, create=False)
- self.assertIsInstance(db2, sqlite_backend.SQLitePartialExpandDatabase)
-
- def test_open_database_non_existent(self):
- temp_dir = self.createTempDir(prefix='u1db-test-')
- path = temp_dir + '/non-existent.sqlite'
- self.assertRaises(errors.DatabaseDoesNotExist,
- sqlite_backend.SQLiteDatabase.open_database, path,
- create=False)
-
- def test_delete_database_existent(self):
- temp_dir = self.createTempDir(prefix='u1db-test-')
- path = temp_dir + '/new.sqlite'
- db = sqlite_backend.SQLiteDatabase.open_database(path, create=True)
- db.close()
- sqlite_backend.SQLiteDatabase.delete_database(path)
- self.assertRaises(errors.DatabaseDoesNotExist,
- sqlite_backend.SQLiteDatabase.open_database, path,
- create=False)
-
- def test_delete_database_nonexistent(self):
- temp_dir = self.createTempDir(prefix='u1db-test-')
- path = temp_dir + '/non-existent.sqlite'
- self.assertRaises(errors.DatabaseDoesNotExist,
- sqlite_backend.SQLiteDatabase.delete_database, path)
-
- def test__get_indexed_fields(self):
- self.db.create_index('idx1', 'a', 'b')
- self.assertEqual(set(['a', 'b']), self.db._get_indexed_fields())
- self.db.create_index('idx2', 'b', 'c')
- self.assertEqual(set(['a', 'b', 'c']), self.db._get_indexed_fields())
-
- def test_indexed_fields_expanded(self):
- self.db.create_index('idx1', 'key1')
- doc1 = self.db.create_doc_from_json('{"key1": "val1", "key2": "val2"}')
- self.assertEqual(set(['key1']), self.db._get_indexed_fields())
- c = self.db._get_sqlite_handle().cursor()
- c.execute("SELECT doc_id, field_name, value FROM document_fields"
- " ORDER BY doc_id, field_name, value")
- self.assertEqual([(doc1.doc_id, 'key1', 'val1')], c.fetchall())
-
- def test_create_index_updates_fields(self):
- doc1 = self.db.create_doc_from_json('{"key1": "val1", "key2": "val2"}')
- self.db.create_index('idx1', 'key1')
- self.assertEqual(set(['key1']), self.db._get_indexed_fields())
- c = self.db._get_sqlite_handle().cursor()
- c.execute("SELECT doc_id, field_name, value FROM document_fields"
- " ORDER BY doc_id, field_name, value")
- self.assertEqual([(doc1.doc_id, 'key1', 'val1')], c.fetchall())
-
- def assertFormatQueryEquals(self, exp_statement, exp_args, definition,
- values):
- statement, args = self.db._format_query(definition, values)
- self.assertEqual(exp_statement, statement)
- self.assertEqual(exp_args, args)
-
- def test__format_query(self):
- self.assertFormatQueryEquals(
- "SELECT d.doc_id, d.doc_rev, d.content, count(c.doc_rev) FROM "
- "document d, document_fields d0 LEFT OUTER JOIN conflicts c ON "
- "c.doc_id = d.doc_id WHERE d.doc_id = d0.doc_id AND d0.field_name "
- "= ? AND d0.value = ? GROUP BY d.doc_id, d.doc_rev, d.content "
- "ORDER BY d0.value;", ["key1", "a"],
- ["key1"], ["a"])
-
- def test__format_query2(self):
- self.assertFormatQueryEquals(
- 'SELECT d.doc_id, d.doc_rev, d.content, count(c.doc_rev) FROM '
- 'document d, document_fields d0, document_fields d1, '
- 'document_fields d2 LEFT OUTER JOIN conflicts c ON c.doc_id = '
- 'd.doc_id WHERE d.doc_id = d0.doc_id AND d0.field_name = ? AND '
- 'd0.value = ? AND d.doc_id = d1.doc_id AND d1.field_name = ? AND '
- 'd1.value = ? AND d.doc_id = d2.doc_id AND d2.field_name = ? AND '
- 'd2.value = ? GROUP BY d.doc_id, d.doc_rev, d.content ORDER BY '
- 'd0.value, d1.value, d2.value;',
- ["key1", "a", "key2", "b", "key3", "c"],
- ["key1", "key2", "key3"], ["a", "b", "c"])
-
- def test__format_query_wildcard(self):
- self.assertFormatQueryEquals(
- 'SELECT d.doc_id, d.doc_rev, d.content, count(c.doc_rev) FROM '
- 'document d, document_fields d0, document_fields d1, '
- 'document_fields d2 LEFT OUTER JOIN conflicts c ON c.doc_id = '
- 'd.doc_id WHERE d.doc_id = d0.doc_id AND d0.field_name = ? AND '
- 'd0.value = ? AND d.doc_id = d1.doc_id AND d1.field_name = ? AND '
- 'd1.value GLOB ? AND d.doc_id = d2.doc_id AND d2.field_name = ? '
- 'AND d2.value NOT NULL GROUP BY d.doc_id, d.doc_rev, d.content '
- 'ORDER BY d0.value, d1.value, d2.value;',
- ["key1", "a", "key2", "b*", "key3"], ["key1", "key2", "key3"],
- ["a", "b*", "*"])
-
- def assertFormatRangeQueryEquals(self, exp_statement, exp_args, definition,
- start_value, end_value):
- statement, args = self.db._format_range_query(
- definition, start_value, end_value)
- self.assertEqual(exp_statement, statement)
- self.assertEqual(exp_args, args)
-
- def test__format_range_query(self):
- self.assertFormatRangeQueryEquals(
- 'SELECT d.doc_id, d.doc_rev, d.content, count(c.doc_rev) FROM '
- 'document d, document_fields d0, document_fields d1, '
- 'document_fields d2 LEFT OUTER JOIN conflicts c ON c.doc_id = '
- 'd.doc_id WHERE d.doc_id = d0.doc_id AND d0.field_name = ? AND '
- 'd0.value >= ? AND d.doc_id = d1.doc_id AND d1.field_name = ? AND '
- 'd1.value >= ? AND d.doc_id = d2.doc_id AND d2.field_name = ? AND '
- 'd2.value >= ? AND d.doc_id = d0.doc_id AND d0.field_name = ? AND '
- 'd0.value <= ? AND d.doc_id = d1.doc_id AND d1.field_name = ? AND '
- 'd1.value <= ? AND d.doc_id = d2.doc_id AND d2.field_name = ? AND '
- 'd2.value <= ? GROUP BY d.doc_id, d.doc_rev, d.content ORDER BY '
- 'd0.value, d1.value, d2.value;',
- ['key1', 'a', 'key2', 'b', 'key3', 'c', 'key1', 'p', 'key2', 'q',
- 'key3', 'r'],
- ["key1", "key2", "key3"], ["a", "b", "c"], ["p", "q", "r"])
-
- def test__format_range_query_no_start(self):
- self.assertFormatRangeQueryEquals(
- 'SELECT d.doc_id, d.doc_rev, d.content, count(c.doc_rev) FROM '
- 'document d, document_fields d0, document_fields d1, '
- 'document_fields d2 LEFT OUTER JOIN conflicts c ON c.doc_id = '
- 'd.doc_id WHERE d.doc_id = d0.doc_id AND d0.field_name = ? AND '
- 'd0.value <= ? AND d.doc_id = d1.doc_id AND d1.field_name = ? AND '
- 'd1.value <= ? AND d.doc_id = d2.doc_id AND d2.field_name = ? AND '
- 'd2.value <= ? GROUP BY d.doc_id, d.doc_rev, d.content ORDER BY '
- 'd0.value, d1.value, d2.value;',
- ['key1', 'a', 'key2', 'b', 'key3', 'c'],
- ["key1", "key2", "key3"], None, ["a", "b", "c"])
-
- def test__format_range_query_no_end(self):
- self.assertFormatRangeQueryEquals(
- 'SELECT d.doc_id, d.doc_rev, d.content, count(c.doc_rev) FROM '
- 'document d, document_fields d0, document_fields d1, '
- 'document_fields d2 LEFT OUTER JOIN conflicts c ON c.doc_id = '
- 'd.doc_id WHERE d.doc_id = d0.doc_id AND d0.field_name = ? AND '
- 'd0.value >= ? AND d.doc_id = d1.doc_id AND d1.field_name = ? AND '
- 'd1.value >= ? AND d.doc_id = d2.doc_id AND d2.field_name = ? AND '
- 'd2.value >= ? GROUP BY d.doc_id, d.doc_rev, d.content ORDER BY '
- 'd0.value, d1.value, d2.value;',
- ['key1', 'a', 'key2', 'b', 'key3', 'c'],
- ["key1", "key2", "key3"], ["a", "b", "c"], None)
-
- def test__format_range_query_wildcard(self):
- self.assertFormatRangeQueryEquals(
- 'SELECT d.doc_id, d.doc_rev, d.content, count(c.doc_rev) FROM '
- 'document d, document_fields d0, document_fields d1, '
- 'document_fields d2 LEFT OUTER JOIN conflicts c ON c.doc_id = '
- 'd.doc_id WHERE d.doc_id = d0.doc_id AND d0.field_name = ? AND '
- 'd0.value >= ? AND d.doc_id = d1.doc_id AND d1.field_name = ? AND '
- 'd1.value >= ? AND d.doc_id = d2.doc_id AND d2.field_name = ? AND '
- 'd2.value NOT NULL AND d.doc_id = d0.doc_id AND d0.field_name = ? '
- 'AND d0.value <= ? AND d.doc_id = d1.doc_id AND d1.field_name = ? '
- 'AND (d1.value < ? OR d1.value GLOB ?) AND d.doc_id = d2.doc_id '
- 'AND d2.field_name = ? AND d2.value NOT NULL GROUP BY d.doc_id, '
- 'd.doc_rev, d.content ORDER BY d0.value, d1.value, d2.value;',
- ['key1', 'a', 'key2', 'b', 'key3', 'key1', 'p', 'key2', 'q', 'q*',
- 'key3'],
- ["key1", "key2", "key3"], ["a", "b*", "*"], ["p", "q*", "*"])
diff --git a/src/leap/soledad/u1db/tests/test_sync.py b/src/leap/soledad/u1db/tests/test_sync.py
deleted file mode 100644
index f2a925f0..00000000
--- a/src/leap/soledad/u1db/tests/test_sync.py
+++ /dev/null
@@ -1,1285 +0,0 @@
-# Copyright 2011-2012 Canonical Ltd.
-#
-# This file is part of u1db.
-#
-# u1db is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License version 3
-# as published by the Free Software Foundation.
-#
-# u1db is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with u1db. If not, see <http://www.gnu.org/licenses/>.
-
-"""The Synchronization class for U1DB."""
-
-import os
-from wsgiref import simple_server
-
-from u1db import (
- errors,
- sync,
- tests,
- vectorclock,
- SyncTarget,
- )
-from u1db.backends import (
- inmemory,
- )
-from u1db.remote import (
- http_target,
- )
-
-from u1db.tests.test_remote_sync_target import (
- make_http_app,
- make_oauth_http_app,
- )
-
-simple_doc = tests.simple_doc
-nested_doc = tests.nested_doc
-
-
-def _make_local_db_and_target(test):
- db = test.create_database('test')
- st = db.get_sync_target()
- return db, st
-
-
-def _make_local_db_and_http_target(test, path='test'):
- test.startServer()
- db = test.request_state._create_database(os.path.basename(path))
- st = http_target.HTTPSyncTarget.connect(test.getURL(path))
- return db, st
-
-
-def _make_c_db_and_c_http_target(test, path='test'):
- test.startServer()
- db = test.request_state._create_database(os.path.basename(path))
- url = test.getURL(path)
- st = tests.c_backend_wrapper.create_http_sync_target(url)
- return db, st
-
-
-def _make_local_db_and_oauth_http_target(test):
- db, st = _make_local_db_and_http_target(test, '~/test')
- st.set_oauth_credentials(tests.consumer1.key, tests.consumer1.secret,
- tests.token1.key, tests.token1.secret)
- return db, st
-
-
-def _make_c_db_and_oauth_http_target(test, path='~/test'):
- test.startServer()
- db = test.request_state._create_database(os.path.basename(path))
- url = test.getURL(path)
- st = tests.c_backend_wrapper.create_oauth_http_sync_target(url,
- tests.consumer1.key, tests.consumer1.secret,
- tests.token1.key, tests.token1.secret)
- return db, st
-
-
-target_scenarios = [
- ('local', {'create_db_and_target': _make_local_db_and_target}),
- ('http', {'create_db_and_target': _make_local_db_and_http_target,
- 'make_app_with_state': make_http_app}),
- ('oauth_http', {'create_db_and_target':
- _make_local_db_and_oauth_http_target,
- 'make_app_with_state': make_oauth_http_app}),
- ]
-
-c_db_scenarios = [
- ('local,c', {'create_db_and_target': _make_local_db_and_target,
- 'make_database_for_test': tests.make_c_database_for_test,
- 'copy_database_for_test': tests.copy_c_database_for_test,
- 'make_document_for_test': tests.make_c_document_for_test,
- 'whitebox': False}),
- ('http,c', {'create_db_and_target': _make_c_db_and_c_http_target,
- 'make_database_for_test': tests.make_c_database_for_test,
- 'copy_database_for_test': tests.copy_c_database_for_test,
- 'make_document_for_test': tests.make_c_document_for_test,
- 'make_app_with_state': make_http_app,
- 'whitebox': False}),
- ('oauth_http,c', {'create_db_and_target': _make_c_db_and_oauth_http_target,
- 'make_database_for_test': tests.make_c_database_for_test,
- 'copy_database_for_test': tests.copy_c_database_for_test,
- 'make_document_for_test': tests.make_c_document_for_test,
- 'make_app_with_state': make_oauth_http_app,
- 'whitebox': False}),
- ]
-
-
-class DatabaseSyncTargetTests(tests.DatabaseBaseTests,
- tests.TestCaseWithServer):
-
- scenarios = (tests.multiply_scenarios(tests.DatabaseBaseTests.scenarios,
- target_scenarios)
- + c_db_scenarios)
- # whitebox true means self.db is the actual local db object
- # against which the sync is performed
- whitebox = True
-
- def setUp(self):
- super(DatabaseSyncTargetTests, self).setUp()
- self.db, self.st = self.create_db_and_target(self)
- self.other_changes = []
-
- def tearDown(self):
- # We delete them explicitly, so that connections are cleanly closed
- del self.st
- self.db.close()
- del self.db
- super(DatabaseSyncTargetTests, self).tearDown()
-
- def receive_doc(self, doc, gen, trans_id):
- self.other_changes.append(
- (doc.doc_id, doc.rev, doc.get_json(), gen, trans_id))
-
- def set_trace_hook(self, callback, shallow=False):
- setter = (self.st._set_trace_hook if not shallow else
- self.st._set_trace_hook_shallow)
- try:
- setter(callback)
- except NotImplementedError:
- self.skipTest("%s does not implement _set_trace_hook"
- % (self.st.__class__.__name__,))
-
- def test_get_sync_target(self):
- self.assertIsNot(None, self.st)
-
- def test_get_sync_info(self):
- self.assertEqual(
- ('test', 0, '', 0, ''), self.st.get_sync_info('other'))
-
- def test_create_doc_updates_sync_info(self):
- self.assertEqual(
- ('test', 0, '', 0, ''), self.st.get_sync_info('other'))
- self.db.create_doc_from_json(simple_doc)
- self.assertEqual(1, self.st.get_sync_info('other')[1])
-
- def test_record_sync_info(self):
- self.st.record_sync_info('replica', 10, 'T-transid')
- self.assertEqual(
- ('test', 0, '', 10, 'T-transid'), self.st.get_sync_info('replica'))
-
- def test_sync_exchange(self):
- docs_by_gen = [
- (self.make_document('doc-id', 'replica:1', simple_doc), 10,
- 'T-sid')]
- new_gen, trans_id = self.st.sync_exchange(
- docs_by_gen, 'replica', last_known_generation=0,
- last_known_trans_id=None, return_doc_cb=self.receive_doc)
- self.assertGetDoc(self.db, 'doc-id', 'replica:1', simple_doc, False)
- self.assertTransactionLog(['doc-id'], self.db)
- last_trans_id = self.getLastTransId(self.db)
- self.assertEqual(([], 1, last_trans_id),
- (self.other_changes, new_gen, last_trans_id))
- self.assertEqual(10, self.st.get_sync_info('replica')[3])
-
- def test_sync_exchange_deleted(self):
- doc = self.db.create_doc_from_json('{}')
- edit_rev = 'replica:1|' + doc.rev
- docs_by_gen = [
- (self.make_document(doc.doc_id, edit_rev, None), 10, 'T-sid')]
- new_gen, trans_id = self.st.sync_exchange(
- docs_by_gen, 'replica', last_known_generation=0,
- last_known_trans_id=None, return_doc_cb=self.receive_doc)
- self.assertGetDocIncludeDeleted(
- self.db, doc.doc_id, edit_rev, None, False)
- self.assertTransactionLog([doc.doc_id, doc.doc_id], self.db)
- last_trans_id = self.getLastTransId(self.db)
- self.assertEqual(([], 2, last_trans_id),
- (self.other_changes, new_gen, trans_id))
- self.assertEqual(10, self.st.get_sync_info('replica')[3])
-
- def test_sync_exchange_push_many(self):
- docs_by_gen = [
- (self.make_document('doc-id', 'replica:1', simple_doc), 10, 'T-1'),
- (self.make_document('doc-id2', 'replica:1', nested_doc), 11,
- 'T-2')]
- new_gen, trans_id = self.st.sync_exchange(
- docs_by_gen, 'replica', last_known_generation=0,
- last_known_trans_id=None, return_doc_cb=self.receive_doc)
- self.assertGetDoc(self.db, 'doc-id', 'replica:1', simple_doc, False)
- self.assertGetDoc(self.db, 'doc-id2', 'replica:1', nested_doc, False)
- self.assertTransactionLog(['doc-id', 'doc-id2'], self.db)
- last_trans_id = self.getLastTransId(self.db)
- self.assertEqual(([], 2, last_trans_id),
- (self.other_changes, new_gen, trans_id))
- self.assertEqual(11, self.st.get_sync_info('replica')[3])
-
- def test_sync_exchange_refuses_conflicts(self):
- doc = self.db.create_doc_from_json(simple_doc)
- self.assertTransactionLog([doc.doc_id], self.db)
- new_doc = '{"key": "altval"}'
- docs_by_gen = [
- (self.make_document(doc.doc_id, 'replica:1', new_doc), 10,
- 'T-sid')]
- new_gen, _ = self.st.sync_exchange(
- docs_by_gen, 'replica', last_known_generation=0,
- last_known_trans_id=None, return_doc_cb=self.receive_doc)
- self.assertTransactionLog([doc.doc_id], self.db)
- self.assertEqual(
- (doc.doc_id, doc.rev, simple_doc, 1), self.other_changes[0][:-1])
- self.assertEqual(1, new_gen)
- if self.whitebox:
- self.assertEqual(self.db._last_exchange_log['return'],
- {'last_gen': 1, 'docs': [(doc.doc_id, doc.rev)]})
-
- def test_sync_exchange_ignores_convergence(self):
- doc = self.db.create_doc_from_json(simple_doc)
- self.assertTransactionLog([doc.doc_id], self.db)
- gen, txid = self.db._get_generation_info()
- docs_by_gen = [
- (self.make_document(doc.doc_id, doc.rev, simple_doc), 10, 'T-sid')]
- new_gen, _ = self.st.sync_exchange(
- docs_by_gen, 'replica', last_known_generation=gen,
- last_known_trans_id=txid, return_doc_cb=self.receive_doc)
- self.assertTransactionLog([doc.doc_id], self.db)
- self.assertEqual(([], 1), (self.other_changes, new_gen))
-
- def test_sync_exchange_returns_new_docs(self):
- doc = self.db.create_doc_from_json(simple_doc)
- self.assertTransactionLog([doc.doc_id], self.db)
- new_gen, _ = self.st.sync_exchange(
- [], 'other-replica', last_known_generation=0,
- last_known_trans_id=None, return_doc_cb=self.receive_doc)
- self.assertTransactionLog([doc.doc_id], self.db)
- self.assertEqual(
- (doc.doc_id, doc.rev, simple_doc, 1), self.other_changes[0][:-1])
- self.assertEqual(1, new_gen)
- if self.whitebox:
- self.assertEqual(self.db._last_exchange_log['return'],
- {'last_gen': 1, 'docs': [(doc.doc_id, doc.rev)]})
-
- def test_sync_exchange_returns_deleted_docs(self):
- doc = self.db.create_doc_from_json(simple_doc)
- self.db.delete_doc(doc)
- self.assertTransactionLog([doc.doc_id, doc.doc_id], self.db)
- new_gen, _ = self.st.sync_exchange(
- [], 'other-replica', last_known_generation=0,
- last_known_trans_id=None, return_doc_cb=self.receive_doc)
- self.assertTransactionLog([doc.doc_id, doc.doc_id], self.db)
- self.assertEqual(
- (doc.doc_id, doc.rev, None, 2), self.other_changes[0][:-1])
- self.assertEqual(2, new_gen)
- if self.whitebox:
- self.assertEqual(self.db._last_exchange_log['return'],
- {'last_gen': 2, 'docs': [(doc.doc_id, doc.rev)]})
-
- def test_sync_exchange_returns_many_new_docs(self):
- doc = self.db.create_doc_from_json(simple_doc)
- doc2 = self.db.create_doc_from_json(nested_doc)
- self.assertTransactionLog([doc.doc_id, doc2.doc_id], self.db)
- new_gen, _ = self.st.sync_exchange(
- [], 'other-replica', last_known_generation=0,
- last_known_trans_id=None, return_doc_cb=self.receive_doc)
- self.assertTransactionLog([doc.doc_id, doc2.doc_id], self.db)
- self.assertEqual(2, new_gen)
- self.assertEqual(
- [(doc.doc_id, doc.rev, simple_doc, 1),
- (doc2.doc_id, doc2.rev, nested_doc, 2)],
- [c[:-1] for c in self.other_changes])
- if self.whitebox:
- self.assertEqual(
- self.db._last_exchange_log['return'],
- {'last_gen': 2, 'docs':
- [(doc.doc_id, doc.rev), (doc2.doc_id, doc2.rev)]})
-
- def test_sync_exchange_getting_newer_docs(self):
- doc = self.db.create_doc_from_json(simple_doc)
- self.assertTransactionLog([doc.doc_id], self.db)
- new_doc = '{"key": "altval"}'
- docs_by_gen = [
- (self.make_document(doc.doc_id, 'test:1|z:2', new_doc), 10,
- 'T-sid')]
- new_gen, _ = self.st.sync_exchange(
- docs_by_gen, 'other-replica', last_known_generation=0,
- last_known_trans_id=None, return_doc_cb=self.receive_doc)
- self.assertTransactionLog([doc.doc_id, doc.doc_id], self.db)
- self.assertEqual(([], 2), (self.other_changes, new_gen))
-
- def test_sync_exchange_with_concurrent_updates_of_synced_doc(self):
- expected = []
-
- def before_whatschanged_cb(state):
- if state != 'before whats_changed':
- return
- cont = '{"key": "cuncurrent"}'
- conc_rev = self.db.put_doc(
- self.make_document(doc.doc_id, 'test:1|z:2', cont))
- expected.append((doc.doc_id, conc_rev, cont, 3))
-
- self.set_trace_hook(before_whatschanged_cb)
- doc = self.db.create_doc_from_json(simple_doc)
- self.assertTransactionLog([doc.doc_id], self.db)
- new_doc = '{"key": "altval"}'
- docs_by_gen = [
- (self.make_document(doc.doc_id, 'test:1|z:2', new_doc), 10,
- 'T-sid')]
- new_gen, _ = self.st.sync_exchange(
- docs_by_gen, 'other-replica', last_known_generation=0,
- last_known_trans_id=None, return_doc_cb=self.receive_doc)
- self.assertEqual(expected, [c[:-1] for c in self.other_changes])
- self.assertEqual(3, new_gen)
-
- def test_sync_exchange_with_concurrent_updates(self):
-
- def after_whatschanged_cb(state):
- if state != 'after whats_changed':
- return
- self.db.create_doc_from_json('{"new": "doc"}')
-
- self.set_trace_hook(after_whatschanged_cb)
- doc = self.db.create_doc_from_json(simple_doc)
- self.assertTransactionLog([doc.doc_id], self.db)
- new_doc = '{"key": "altval"}'
- docs_by_gen = [
- (self.make_document(doc.doc_id, 'test:1|z:2', new_doc), 10,
- 'T-sid')]
- new_gen, _ = self.st.sync_exchange(
- docs_by_gen, 'other-replica', last_known_generation=0,
- last_known_trans_id=None, return_doc_cb=self.receive_doc)
- self.assertEqual(([], 2), (self.other_changes, new_gen))
-
- def test_sync_exchange_converged_handling(self):
- doc = self.db.create_doc_from_json(simple_doc)
- docs_by_gen = [
- (self.make_document('new', 'other:1', '{}'), 4, 'T-foo'),
- (self.make_document(doc.doc_id, doc.rev, doc.get_json()), 5,
- 'T-bar')]
- new_gen, _ = self.st.sync_exchange(
- docs_by_gen, 'other-replica', last_known_generation=0,
- last_known_trans_id=None, return_doc_cb=self.receive_doc)
- self.assertEqual(([], 2), (self.other_changes, new_gen))
-
- def test_sync_exchange_detect_incomplete_exchange(self):
- def before_get_docs_explode(state):
- if state != 'before get_docs':
- return
- raise errors.U1DBError("fail")
- self.set_trace_hook(before_get_docs_explode)
- # suppress traceback printing in the wsgiref server
- self.patch(simple_server.ServerHandler,
- 'log_exception', lambda h, exc_info: None)
- doc = self.db.create_doc_from_json(simple_doc)
- self.assertTransactionLog([doc.doc_id], self.db)
- self.assertRaises(
- (errors.U1DBError, errors.BrokenSyncStream),
- self.st.sync_exchange, [], 'other-replica',
- last_known_generation=0, last_known_trans_id=None,
- return_doc_cb=self.receive_doc)
-
- def test_sync_exchange_doc_ids(self):
- sync_exchange_doc_ids = getattr(self.st, 'sync_exchange_doc_ids', None)
- if sync_exchange_doc_ids is None:
- self.skipTest("sync_exchange_doc_ids not implemented")
- db2 = self.create_database('test2')
- doc = db2.create_doc_from_json(simple_doc)
- new_gen, trans_id = sync_exchange_doc_ids(
- db2, [(doc.doc_id, 10, 'T-sid')], 0, None,
- return_doc_cb=self.receive_doc)
- self.assertGetDoc(self.db, doc.doc_id, doc.rev, simple_doc, False)
- self.assertTransactionLog([doc.doc_id], self.db)
- last_trans_id = self.getLastTransId(self.db)
- self.assertEqual(([], 1, last_trans_id),
- (self.other_changes, new_gen, trans_id))
- self.assertEqual(10, self.st.get_sync_info(db2._replica_uid)[3])
-
- def test__set_trace_hook(self):
- called = []
-
- def cb(state):
- called.append(state)
-
- self.set_trace_hook(cb)
- self.st.sync_exchange([], 'replica', 0, None, self.receive_doc)
- self.st.record_sync_info('replica', 0, 'T-sid')
- self.assertEqual(['before whats_changed',
- 'after whats_changed',
- 'before get_docs',
- 'record_sync_info',
- ],
- called)
-
- def test__set_trace_hook_shallow(self):
- if (self.st._set_trace_hook_shallow == self.st._set_trace_hook
- or self.st._set_trace_hook_shallow.im_func ==
- SyncTarget._set_trace_hook_shallow.im_func):
- # shallow same as full
- expected = ['before whats_changed',
- 'after whats_changed',
- 'before get_docs',
- 'record_sync_info',
- ]
- else:
- expected = ['sync_exchange', 'record_sync_info']
-
- called = []
-
- def cb(state):
- called.append(state)
-
- self.set_trace_hook(cb, shallow=True)
- self.st.sync_exchange([], 'replica', 0, None, self.receive_doc)
- self.st.record_sync_info('replica', 0, 'T-sid')
- self.assertEqual(expected, called)
-
-
-def sync_via_synchronizer(test, db_source, db_target, trace_hook=None,
- trace_hook_shallow=None):
- target = db_target.get_sync_target()
- trace_hook = trace_hook or trace_hook_shallow
- if trace_hook:
- target._set_trace_hook(trace_hook)
- return sync.Synchronizer(db_source, target).sync()
-
-
-sync_scenarios = []
-for name, scenario in tests.LOCAL_DATABASES_SCENARIOS:
- scenario = dict(scenario)
- scenario['do_sync'] = sync_via_synchronizer
- sync_scenarios.append((name, scenario))
- scenario = dict(scenario)
-
-
-def make_database_for_http_test(test, replica_uid):
- if test.server is None:
- test.startServer()
- db = test.request_state._create_database(replica_uid)
- try:
- http_at = test._http_at
- except AttributeError:
- http_at = test._http_at = {}
- http_at[db] = replica_uid
- return db
-
-
-def copy_database_for_http_test(test, db):
- # DO NOT COPY OR REUSE THIS CODE OUTSIDE TESTS: COPYING U1DB DATABASES IS
- # THE WRONG THING TO DO, THE ONLY REASON WE DO SO HERE IS TO TEST THAT WE
- # CORRECTLY DETECT IT HAPPENING SO THAT WE CAN RAISE ERRORS RATHER THAN
- # CORRUPT USER DATA. USE SYNC INSTEAD, OR WE WILL SEND NINJA TO YOUR HOUSE.
- if test.server is None:
- test.startServer()
- new_db = test.request_state._copy_database(db)
- try:
- http_at = test._http_at
- except AttributeError:
- http_at = test._http_at = {}
- path = db._replica_uid
- while path in http_at.values():
- path += 'copy'
- http_at[new_db] = path
- return new_db
-
-
-def sync_via_synchronizer_and_http(test, db_source, db_target,
- trace_hook=None, trace_hook_shallow=None):
- if trace_hook:
- test.skipTest("full trace hook unsupported over http")
- path = test._http_at[db_target]
- target = http_target.HTTPSyncTarget.connect(test.getURL(path))
- if trace_hook_shallow:
- target._set_trace_hook_shallow(trace_hook_shallow)
- return sync.Synchronizer(db_source, target).sync()
-
-
-sync_scenarios.append(('pyhttp', {
- 'make_database_for_test': make_database_for_http_test,
- 'copy_database_for_test': copy_database_for_http_test,
- 'make_document_for_test': tests.make_document_for_test,
- 'make_app_with_state': make_http_app,
- 'do_sync': sync_via_synchronizer_and_http
- }))
-
-
-if tests.c_backend_wrapper is not None:
- # TODO: We should hook up sync tests with an HTTP target
- def sync_via_c_sync(test, db_source, db_target, trace_hook=None,
- trace_hook_shallow=None):
- target = db_target.get_sync_target()
- trace_hook = trace_hook or trace_hook_shallow
- if trace_hook:
- target._set_trace_hook(trace_hook)
- return tests.c_backend_wrapper.sync_db_to_target(db_source, target)
-
- for name, scenario in tests.C_DATABASE_SCENARIOS:
- scenario = dict(scenario)
- scenario['do_sync'] = sync_via_synchronizer
- sync_scenarios.append((name + ',pysync', scenario))
- scenario = dict(scenario)
- scenario['do_sync'] = sync_via_c_sync
- sync_scenarios.append((name + ',csync', scenario))
-
-
-class DatabaseSyncTests(tests.DatabaseBaseTests,
- tests.TestCaseWithServer):
-
- scenarios = sync_scenarios
- do_sync = None # set by scenarios
-
- def create_database(self, replica_uid, sync_role=None):
- if replica_uid == 'test' and sync_role is None:
- # created up the chain by base class but unused
- return None
- db = self.create_database_for_role(replica_uid, sync_role)
- if sync_role:
- self._use_tracking[db] = (replica_uid, sync_role)
- return db
-
- def create_database_for_role(self, replica_uid, sync_role):
- # hook point for reuse
- return super(DatabaseSyncTests, self).create_database(replica_uid)
-
- def copy_database(self, db, sync_role=None):
- # DO NOT COPY OR REUSE THIS CODE OUTSIDE TESTS: COPYING U1DB DATABASES
- # IS THE WRONG THING TO DO, THE ONLY REASON WE DO SO HERE IS TO TEST
- # THAT WE CORRECTLY DETECT IT HAPPENING SO THAT WE CAN RAISE ERRORS
- # RATHER THAN CORRUPT USER DATA. USE SYNC INSTEAD, OR WE WILL SEND
- # NINJA TO YOUR HOUSE.
- db_copy = super(DatabaseSyncTests, self).copy_database(db)
- name, orig_sync_role = self._use_tracking[db]
- self._use_tracking[db_copy] = (name + '(copy)', sync_role
- or orig_sync_role)
- return db_copy
-
- def sync(self, db_from, db_to, trace_hook=None,
- trace_hook_shallow=None):
- from_name, from_sync_role = self._use_tracking[db_from]
- to_name, to_sync_role = self._use_tracking[db_to]
- if from_sync_role not in ('source', 'both'):
- raise Exception("%s marked for %s use but used as source" %
- (from_name, from_sync_role))
- if to_sync_role not in ('target', 'both'):
- raise Exception("%s marked for %s use but used as target" %
- (to_name, to_sync_role))
- return self.do_sync(self, db_from, db_to, trace_hook,
- trace_hook_shallow)
-
- def setUp(self):
- self._use_tracking = {}
- super(DatabaseSyncTests, self).setUp()
-
- def assertLastExchangeLog(self, db, expected):
- log = getattr(db, '_last_exchange_log', None)
- if log is None:
- return
- self.assertEqual(expected, log)
-
- def test_sync_tracks_db_generation_of_other(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- self.assertEqual(0, self.sync(self.db1, self.db2))
- self.assertEqual(
- (0, ''), self.db1._get_replica_gen_and_trans_id('test2'))
- self.assertEqual(
- (0, ''), self.db2._get_replica_gen_and_trans_id('test1'))
- self.assertLastExchangeLog(self.db2,
- {'receive': {'docs': [], 'last_known_gen': 0},
- 'return': {'docs': [], 'last_gen': 0}})
-
- def test_sync_autoresolves(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- doc1 = self.db1.create_doc_from_json(simple_doc, doc_id='doc')
- rev1 = doc1.rev
- doc2 = self.db2.create_doc_from_json(simple_doc, doc_id='doc')
- rev2 = doc2.rev
- self.sync(self.db1, self.db2)
- doc = self.db1.get_doc('doc')
- self.assertFalse(doc.has_conflicts)
- self.assertEqual(doc.rev, self.db2.get_doc('doc').rev)
- v = vectorclock.VectorClockRev(doc.rev)
- self.assertTrue(v.is_newer(vectorclock.VectorClockRev(rev1)))
- self.assertTrue(v.is_newer(vectorclock.VectorClockRev(rev2)))
-
- def test_sync_autoresolves_moar(self):
- # here we test that when a database that has a conflicted document is
- # the source of a sync, and the target database has a revision of the
- # conflicted document that is newer than the source database's, and
- # that target's database's document's content is the same as the
- # source's document's conflict's, the source's document's conflict gets
- # autoresolved, and the source's document's revision bumped.
- #
- # idea is as follows:
- # A B
- # a1 -
- # `------->
- # a1 a1
- # v v
- # a2 a1b1
- # `------->
- # a1b1+a2 a1b1
- # v
- # a1b1+a2 a1b2 (a1b2 has same content as a2)
- # `------->
- # a3b2 a1b2 (autoresolved)
- # `------->
- # a3b2 a3b2
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- self.db1.create_doc_from_json(simple_doc, doc_id='doc')
- self.sync(self.db1, self.db2)
- for db, content in [(self.db1, '{}'), (self.db2, '{"hi": 42}')]:
- doc = db.get_doc('doc')
- doc.set_json(content)
- db.put_doc(doc)
- self.sync(self.db1, self.db2)
- # db1 and db2 now both have a doc of {hi:42}, but db1 has a conflict
- doc = self.db1.get_doc('doc')
- rev1 = doc.rev
- self.assertTrue(doc.has_conflicts)
- # set db2 to have a doc of {} (same as db1 before the conflict)
- doc = self.db2.get_doc('doc')
- doc.set_json('{}')
- self.db2.put_doc(doc)
- rev2 = doc.rev
- # sync it across
- self.sync(self.db1, self.db2)
- # tadaa!
- doc = self.db1.get_doc('doc')
- self.assertFalse(doc.has_conflicts)
- vec1 = vectorclock.VectorClockRev(rev1)
- vec2 = vectorclock.VectorClockRev(rev2)
- vec3 = vectorclock.VectorClockRev(doc.rev)
- self.assertTrue(vec3.is_newer(vec1))
- self.assertTrue(vec3.is_newer(vec2))
- # because the conflict is on the source, sync it another time
- self.sync(self.db1, self.db2)
- # make sure db2 now has the exact same thing
- self.assertEqual(self.db1.get_doc('doc'), self.db2.get_doc('doc'))
-
- def test_sync_autoresolves_moar_backwards(self):
- # here we test that when a database that has a conflicted document is
- # the target of a sync, and the source database has a revision of the
- # conflicted document that is newer than the target database's, and
- # that source's database's document's content is the same as the
- # target's document's conflict's, the target's document's conflict gets
- # autoresolved, and the document's revision bumped.
- #
- # idea is as follows:
- # A B
- # a1 -
- # `------->
- # a1 a1
- # v v
- # a2 a1b1
- # `------->
- # a1b1+a2 a1b1
- # v
- # a1b1+a2 a1b2 (a1b2 has same content as a2)
- # <-------'
- # a3b2 a3b2 (autoresolved and propagated)
- self.db1 = self.create_database('test1', 'both')
- self.db2 = self.create_database('test2', 'both')
- self.db1.create_doc_from_json(simple_doc, doc_id='doc')
- self.sync(self.db1, self.db2)
- for db, content in [(self.db1, '{}'), (self.db2, '{"hi": 42}')]:
- doc = db.get_doc('doc')
- doc.set_json(content)
- db.put_doc(doc)
- self.sync(self.db1, self.db2)
- # db1 and db2 now both have a doc of {hi:42}, but db1 has a conflict
- doc = self.db1.get_doc('doc')
- rev1 = doc.rev
- self.assertTrue(doc.has_conflicts)
- revc = self.db1.get_doc_conflicts('doc')[-1].rev
- # set db2 to have a doc of {} (same as db1 before the conflict)
- doc = self.db2.get_doc('doc')
- doc.set_json('{}')
- self.db2.put_doc(doc)
- rev2 = doc.rev
- # sync it across
- self.sync(self.db2, self.db1)
- # tadaa!
- doc = self.db1.get_doc('doc')
- self.assertFalse(doc.has_conflicts)
- vec1 = vectorclock.VectorClockRev(rev1)
- vec2 = vectorclock.VectorClockRev(rev2)
- vec3 = vectorclock.VectorClockRev(doc.rev)
- vecc = vectorclock.VectorClockRev(revc)
- self.assertTrue(vec3.is_newer(vec1))
- self.assertTrue(vec3.is_newer(vec2))
- self.assertTrue(vec3.is_newer(vecc))
- # make sure db2 now has the exact same thing
- self.assertEqual(self.db1.get_doc('doc'), self.db2.get_doc('doc'))
-
- def test_sync_autoresolves_moar_backwards_three(self):
- # same as autoresolves_moar_backwards, but with three databases (note
- # all the syncs go in the same direction -- this is a more natural
- # scenario):
- #
- # A B C
- # a1 - -
- # `------->
- # a1 a1 -
- # `------->
- # a1 a1 a1
- # v v
- # a2 a1b1 a1
- # `------------------->
- # a2 a1b1 a2
- # `------->
- # a2+a1b1 a2
- # v
- # a2 a2+a1b1 a2c1 (same as a1b1)
- # `------------------->
- # a2c1 a2+a1b1 a2c1
- # `------->
- # a2b2c1 a2b2c1 a2c1
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'both')
- self.db3 = self.create_database('test3', 'target')
- self.db1.create_doc_from_json(simple_doc, doc_id='doc')
- self.sync(self.db1, self.db2)
- self.sync(self.db2, self.db3)
- for db, content in [(self.db2, '{"hi": 42}'),
- (self.db1, '{}'),
- ]:
- doc = db.get_doc('doc')
- doc.set_json(content)
- db.put_doc(doc)
- self.sync(self.db1, self.db3)
- self.sync(self.db2, self.db3)
- # db2 and db3 now both have a doc of {}, but db2 has a
- # conflict
- doc = self.db2.get_doc('doc')
- self.assertTrue(doc.has_conflicts)
- revc = self.db2.get_doc_conflicts('doc')[-1].rev
- self.assertEqual('{}', doc.get_json())
- self.assertEqual(self.db3.get_doc('doc').get_json(), doc.get_json())
- self.assertEqual(self.db3.get_doc('doc').rev, doc.rev)
- # set db3 to have a doc of {hi:42} (same as db2 before the conflict)
- doc = self.db3.get_doc('doc')
- doc.set_json('{"hi": 42}')
- self.db3.put_doc(doc)
- rev3 = doc.rev
- # sync it across to db1
- self.sync(self.db1, self.db3)
- # db1 now has hi:42, with a rev that is newer than db2's doc
- doc = self.db1.get_doc('doc')
- rev1 = doc.rev
- self.assertFalse(doc.has_conflicts)
- self.assertEqual('{"hi": 42}', doc.get_json())
- VCR = vectorclock.VectorClockRev
- self.assertTrue(VCR(rev1).is_newer(VCR(self.db2.get_doc('doc').rev)))
- # so sync it to db2
- self.sync(self.db1, self.db2)
- # tadaa!
- doc = self.db2.get_doc('doc')
- self.assertFalse(doc.has_conflicts)
- # db2's revision of the document is strictly newer than db1's before
- # the sync, and db3's before that sync way back when
- self.assertTrue(VCR(doc.rev).is_newer(VCR(rev1)))
- self.assertTrue(VCR(doc.rev).is_newer(VCR(rev3)))
- self.assertTrue(VCR(doc.rev).is_newer(VCR(revc)))
- # make sure both dbs now have the exact same thing
- self.assertEqual(self.db1.get_doc('doc'), self.db2.get_doc('doc'))
-
- def test_sync_puts_changes(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- doc = self.db1.create_doc_from_json(simple_doc)
- self.assertEqual(1, self.sync(self.db1, self.db2))
- self.assertGetDoc(self.db2, doc.doc_id, doc.rev, simple_doc, False)
- self.assertEqual(1, self.db1._get_replica_gen_and_trans_id('test2')[0])
- self.assertEqual(1, self.db2._get_replica_gen_and_trans_id('test1')[0])
- self.assertLastExchangeLog(self.db2,
- {'receive': {'docs': [(doc.doc_id, doc.rev)],
- 'source_uid': 'test1',
- 'source_gen': 1, 'last_known_gen': 0},
- 'return': {'docs': [], 'last_gen': 1}})
-
- def test_sync_pulls_changes(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- doc = self.db2.create_doc_from_json(simple_doc)
- self.db1.create_index('test-idx', 'key')
- self.assertEqual(0, self.sync(self.db1, self.db2))
- self.assertGetDoc(self.db1, doc.doc_id, doc.rev, simple_doc, False)
- self.assertEqual(1, self.db1._get_replica_gen_and_trans_id('test2')[0])
- self.assertEqual(1, self.db2._get_replica_gen_and_trans_id('test1')[0])
- self.assertLastExchangeLog(self.db2,
- {'receive': {'docs': [], 'last_known_gen': 0},
- 'return': {'docs': [(doc.doc_id, doc.rev)],
- 'last_gen': 1}})
- self.assertEqual([doc], self.db1.get_from_index('test-idx', 'value'))
-
- def test_sync_pulling_doesnt_update_other_if_changed(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- doc = self.db2.create_doc_from_json(simple_doc)
- # After the local side has sent its list of docs, before we start
- # receiving the "targets" response, we update the local database with a
- # new record.
- # When we finish synchronizing, we can notice that something locally
- # was updated, and we cannot tell c2 our new updated generation
-
- def before_get_docs(state):
- if state != 'before get_docs':
- return
- self.db1.create_doc_from_json(simple_doc)
-
- self.assertEqual(0, self.sync(self.db1, self.db2,
- trace_hook=before_get_docs))
- self.assertLastExchangeLog(self.db2,
- {'receive': {'docs': [], 'last_known_gen': 0},
- 'return': {'docs': [(doc.doc_id, doc.rev)],
- 'last_gen': 1}})
- self.assertEqual(1, self.db1._get_replica_gen_and_trans_id('test2')[0])
- # c2 should not have gotten a '_record_sync_info' call, because the
- # local database had been updated more than just by the messages
- # returned from c2.
- self.assertEqual(
- (0, ''), self.db2._get_replica_gen_and_trans_id('test1'))
-
- def test_sync_doesnt_update_other_if_nothing_pulled(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- self.db1.create_doc_from_json(simple_doc)
-
- def no_record_sync_info(state):
- if state != 'record_sync_info':
- return
- self.fail('SyncTarget.record_sync_info was called')
- self.assertEqual(1, self.sync(self.db1, self.db2,
- trace_hook_shallow=no_record_sync_info))
- self.assertEqual(
- 1,
- self.db2._get_replica_gen_and_trans_id(self.db1._replica_uid)[0])
-
- def test_sync_ignores_convergence(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'both')
- doc = self.db1.create_doc_from_json(simple_doc)
- self.db3 = self.create_database('test3', 'target')
- self.assertEqual(1, self.sync(self.db1, self.db3))
- self.assertEqual(0, self.sync(self.db2, self.db3))
- self.assertEqual(1, self.sync(self.db1, self.db2))
- self.assertLastExchangeLog(self.db2,
- {'receive': {'docs': [(doc.doc_id, doc.rev)],
- 'source_uid': 'test1',
- 'source_gen': 1, 'last_known_gen': 0},
- 'return': {'docs': [], 'last_gen': 1}})
-
- def test_sync_ignores_superseded(self):
- self.db1 = self.create_database('test1', 'both')
- self.db2 = self.create_database('test2', 'both')
- doc = self.db1.create_doc_from_json(simple_doc)
- doc_rev1 = doc.rev
- self.db3 = self.create_database('test3', 'target')
- self.sync(self.db1, self.db3)
- self.sync(self.db2, self.db3)
- new_content = '{"key": "altval"}'
- doc.set_json(new_content)
- self.db1.put_doc(doc)
- doc_rev2 = doc.rev
- self.sync(self.db2, self.db1)
- self.assertLastExchangeLog(self.db1,
- {'receive': {'docs': [(doc.doc_id, doc_rev1)],
- 'source_uid': 'test2',
- 'source_gen': 1, 'last_known_gen': 0},
- 'return': {'docs': [(doc.doc_id, doc_rev2)],
- 'last_gen': 2}})
- self.assertGetDoc(self.db1, doc.doc_id, doc_rev2, new_content, False)
-
- def test_sync_sees_remote_conflicted(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- doc1 = self.db1.create_doc_from_json(simple_doc)
- doc_id = doc1.doc_id
- doc1_rev = doc1.rev
- self.db1.create_index('test-idx', 'key')
- new_doc = '{"key": "altval"}'
- doc2 = self.db2.create_doc_from_json(new_doc, doc_id=doc_id)
- doc2_rev = doc2.rev
- self.assertTransactionLog([doc1.doc_id], self.db1)
- self.sync(self.db1, self.db2)
- self.assertLastExchangeLog(self.db2,
- {'receive': {'docs': [(doc_id, doc1_rev)],
- 'source_uid': 'test1',
- 'source_gen': 1, 'last_known_gen': 0},
- 'return': {'docs': [(doc_id, doc2_rev)],
- 'last_gen': 1}})
- self.assertTransactionLog([doc_id, doc_id], self.db1)
- self.assertGetDoc(self.db1, doc_id, doc2_rev, new_doc, True)
- self.assertGetDoc(self.db2, doc_id, doc2_rev, new_doc, False)
- from_idx = self.db1.get_from_index('test-idx', 'altval')[0]
- self.assertEqual(doc2.doc_id, from_idx.doc_id)
- self.assertEqual(doc2.rev, from_idx.rev)
- self.assertTrue(from_idx.has_conflicts)
- self.assertEqual([], self.db1.get_from_index('test-idx', 'value'))
-
- def test_sync_sees_remote_delete_conflicted(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- doc1 = self.db1.create_doc_from_json(simple_doc)
- doc_id = doc1.doc_id
- self.db1.create_index('test-idx', 'key')
- self.sync(self.db1, self.db2)
- doc2 = self.make_document(doc1.doc_id, doc1.rev, doc1.get_json())
- new_doc = '{"key": "altval"}'
- doc1.set_json(new_doc)
- self.db1.put_doc(doc1)
- self.db2.delete_doc(doc2)
- self.assertTransactionLog([doc_id, doc_id], self.db1)
- self.sync(self.db1, self.db2)
- self.assertLastExchangeLog(self.db2,
- {'receive': {'docs': [(doc_id, doc1.rev)],
- 'source_uid': 'test1',
- 'source_gen': 2, 'last_known_gen': 1},
- 'return': {'docs': [(doc_id, doc2.rev)],
- 'last_gen': 2}})
- self.assertTransactionLog([doc_id, doc_id, doc_id], self.db1)
- self.assertGetDocIncludeDeleted(self.db1, doc_id, doc2.rev, None, True)
- self.assertGetDocIncludeDeleted(
- self.db2, doc_id, doc2.rev, None, False)
- self.assertEqual([], self.db1.get_from_index('test-idx', 'value'))
-
- def test_sync_local_race_conflicted(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- doc = self.db1.create_doc_from_json(simple_doc)
- doc_id = doc.doc_id
- doc1_rev = doc.rev
- self.db1.create_index('test-idx', 'key')
- self.sync(self.db1, self.db2)
- content1 = '{"key": "localval"}'
- content2 = '{"key": "altval"}'
- doc.set_json(content2)
- self.db2.put_doc(doc)
- doc2_rev2 = doc.rev
- triggered = []
-
- def after_whatschanged(state):
- if state != 'after whats_changed':
- return
- triggered.append(True)
- doc = self.make_document(doc_id, doc1_rev, content1)
- self.db1.put_doc(doc)
-
- self.sync(self.db1, self.db2, trace_hook=after_whatschanged)
- self.assertEqual([True], triggered)
- self.assertGetDoc(self.db1, doc_id, doc2_rev2, content2, True)
- from_idx = self.db1.get_from_index('test-idx', 'altval')[0]
- self.assertEqual(doc.doc_id, from_idx.doc_id)
- self.assertEqual(doc.rev, from_idx.rev)
- self.assertTrue(from_idx.has_conflicts)
- self.assertEqual([], self.db1.get_from_index('test-idx', 'value'))
- self.assertEqual([], self.db1.get_from_index('test-idx', 'localval'))
-
- def test_sync_propagates_deletes(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'both')
- doc1 = self.db1.create_doc_from_json(simple_doc)
- doc_id = doc1.doc_id
- self.db1.create_index('test-idx', 'key')
- self.sync(self.db1, self.db2)
- self.db2.create_index('test-idx', 'key')
- self.db3 = self.create_database('test3', 'target')
- self.sync(self.db1, self.db3)
- self.db1.delete_doc(doc1)
- deleted_rev = doc1.rev
- self.sync(self.db1, self.db2)
- self.assertLastExchangeLog(self.db2,
- {'receive': {'docs': [(doc_id, deleted_rev)],
- 'source_uid': 'test1',
- 'source_gen': 2, 'last_known_gen': 1},
- 'return': {'docs': [], 'last_gen': 2}})
- self.assertGetDocIncludeDeleted(
- self.db1, doc_id, deleted_rev, None, False)
- self.assertGetDocIncludeDeleted(
- self.db2, doc_id, deleted_rev, None, False)
- self.assertEqual([], self.db1.get_from_index('test-idx', 'value'))
- self.assertEqual([], self.db2.get_from_index('test-idx', 'value'))
- self.sync(self.db2, self.db3)
- self.assertLastExchangeLog(self.db3,
- {'receive': {'docs': [(doc_id, deleted_rev)],
- 'source_uid': 'test2',
- 'source_gen': 2, 'last_known_gen': 0},
- 'return': {'docs': [], 'last_gen': 2}})
- self.assertGetDocIncludeDeleted(
- self.db3, doc_id, deleted_rev, None, False)
-
- def test_sync_propagates_resolution(self):
- self.db1 = self.create_database('test1', 'both')
- self.db2 = self.create_database('test2', 'both')
- doc1 = self.db1.create_doc_from_json('{"a": 1}', doc_id='the-doc')
- db3 = self.create_database('test3', 'both')
- self.sync(self.db2, self.db1)
- self.assertEqual(
- self.db1._get_generation_info(),
- self.db2._get_replica_gen_and_trans_id(self.db1._replica_uid))
- self.assertEqual(
- self.db2._get_generation_info(),
- self.db1._get_replica_gen_and_trans_id(self.db2._replica_uid))
- self.sync(db3, self.db1)
- # update on 2
- doc2 = self.make_document('the-doc', doc1.rev, '{"a": 2}')
- self.db2.put_doc(doc2)
- self.sync(self.db2, db3)
- self.assertEqual(db3.get_doc('the-doc').rev, doc2.rev)
- # update on 1
- doc1.set_json('{"a": 3}')
- self.db1.put_doc(doc1)
- # conflicts
- self.sync(self.db2, self.db1)
- self.sync(db3, self.db1)
- self.assertTrue(self.db2.get_doc('the-doc').has_conflicts)
- self.assertTrue(db3.get_doc('the-doc').has_conflicts)
- # resolve
- conflicts = self.db2.get_doc_conflicts('the-doc')
- doc4 = self.make_document('the-doc', None, '{"a": 4}')
- revs = [doc.rev for doc in conflicts]
- self.db2.resolve_doc(doc4, revs)
- doc2 = self.db2.get_doc('the-doc')
- self.assertEqual(doc4.get_json(), doc2.get_json())
- self.assertFalse(doc2.has_conflicts)
- self.sync(self.db2, db3)
- doc3 = db3.get_doc('the-doc')
- self.assertEqual(doc4.get_json(), doc3.get_json())
- self.assertFalse(doc3.has_conflicts)
-
- def test_sync_supersedes_conflicts(self):
- self.db1 = self.create_database('test1', 'both')
- self.db2 = self.create_database('test2', 'target')
- db3 = self.create_database('test3', 'both')
- doc1 = self.db1.create_doc_from_json('{"a": 1}', doc_id='the-doc')
- self.db2.create_doc_from_json('{"b": 1}', doc_id='the-doc')
- db3.create_doc_from_json('{"c": 1}', doc_id='the-doc')
- self.sync(db3, self.db1)
- self.assertEqual(
- self.db1._get_generation_info(),
- db3._get_replica_gen_and_trans_id(self.db1._replica_uid))
- self.assertEqual(
- db3._get_generation_info(),
- self.db1._get_replica_gen_and_trans_id(db3._replica_uid))
- self.sync(db3, self.db2)
- self.assertEqual(
- self.db2._get_generation_info(),
- db3._get_replica_gen_and_trans_id(self.db2._replica_uid))
- self.assertEqual(
- db3._get_generation_info(),
- self.db2._get_replica_gen_and_trans_id(db3._replica_uid))
- self.assertEqual(3, len(db3.get_doc_conflicts('the-doc')))
- doc1.set_json('{"a": 2}')
- self.db1.put_doc(doc1)
- self.sync(db3, self.db1)
- # original doc1 should have been removed from conflicts
- self.assertEqual(3, len(db3.get_doc_conflicts('the-doc')))
-
- def test_sync_stops_after_get_sync_info(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- self.db1.create_doc_from_json(tests.simple_doc)
- self.sync(self.db1, self.db2)
-
- def put_hook(state):
- self.fail("Tracehook triggered for %s" % (state,))
-
- self.sync(self.db1, self.db2, trace_hook_shallow=put_hook)
-
- def test_sync_detects_rollback_in_source(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- self.db1.create_doc_from_json(tests.simple_doc, doc_id='doc1')
- self.sync(self.db1, self.db2)
- db1_copy = self.copy_database(self.db1)
- self.db1.create_doc_from_json(tests.simple_doc, doc_id='doc2')
- self.sync(self.db1, self.db2)
- self.assertRaises(
- errors.InvalidGeneration, self.sync, db1_copy, self.db2)
-
- def test_sync_detects_rollback_in_target(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- self.db1.create_doc_from_json(tests.simple_doc, doc_id="divergent")
- self.sync(self.db1, self.db2)
- db2_copy = self.copy_database(self.db2)
- self.db2.create_doc_from_json(tests.simple_doc, doc_id='doc2')
- self.sync(self.db1, self.db2)
- self.assertRaises(
- errors.InvalidGeneration, self.sync, self.db1, db2_copy)
-
- def test_sync_detects_diverged_source(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- db3 = self.copy_database(self.db1)
- self.db1.create_doc_from_json(tests.simple_doc, doc_id="divergent")
- db3.create_doc_from_json(tests.simple_doc, doc_id="divergent")
- self.sync(self.db1, self.db2)
- self.assertRaises(
- errors.InvalidTransactionId, self.sync, db3, self.db2)
-
- def test_sync_detects_diverged_target(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- db3 = self.copy_database(self.db2)
- db3.create_doc_from_json(tests.nested_doc, doc_id="divergent")
- self.db1.create_doc_from_json(tests.simple_doc, doc_id="divergent")
- self.sync(self.db1, self.db2)
- self.assertRaises(
- errors.InvalidTransactionId, self.sync, self.db1, db3)
-
- def test_sync_detects_rollback_and_divergence_in_source(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- self.db1.create_doc_from_json(tests.simple_doc, doc_id='doc1')
- self.sync(self.db1, self.db2)
- db1_copy = self.copy_database(self.db1)
- self.db1.create_doc_from_json(tests.simple_doc, doc_id='doc2')
- self.db1.create_doc_from_json(tests.simple_doc, doc_id='doc3')
- self.sync(self.db1, self.db2)
- db1_copy.create_doc_from_json(tests.simple_doc, doc_id='doc2')
- db1_copy.create_doc_from_json(tests.simple_doc, doc_id='doc3')
- self.assertRaises(
- errors.InvalidTransactionId, self.sync, db1_copy, self.db2)
-
- def test_sync_detects_rollback_and_divergence_in_target(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- self.db1.create_doc_from_json(tests.simple_doc, doc_id="divergent")
- self.sync(self.db1, self.db2)
- db2_copy = self.copy_database(self.db2)
- self.db2.create_doc_from_json(tests.simple_doc, doc_id='doc2')
- self.db2.create_doc_from_json(tests.simple_doc, doc_id='doc3')
- self.sync(self.db1, self.db2)
- db2_copy.create_doc_from_json(tests.simple_doc, doc_id='doc2')
- db2_copy.create_doc_from_json(tests.simple_doc, doc_id='doc3')
- self.assertRaises(
- errors.InvalidTransactionId, self.sync, self.db1, db2_copy)
-
-
-class TestDbSync(tests.TestCaseWithServer):
- """Test db.sync remote sync shortcut"""
-
- scenarios = [
- ('py-http', {
- 'make_app_with_state': make_http_app,
- 'make_database_for_test': tests.make_memory_database_for_test,
- }),
- ('c-http', {
- 'make_app_with_state': make_http_app,
- 'make_database_for_test': tests.make_c_database_for_test
- }),
- ('py-oauth-http', {
- 'make_app_with_state': make_oauth_http_app,
- 'make_database_for_test': tests.make_memory_database_for_test,
- 'oauth': True
- }),
- ('c-oauth-http', {
- 'make_app_with_state': make_oauth_http_app,
- 'make_database_for_test': tests.make_c_database_for_test,
- 'oauth': True
- }),
- ]
-
- oauth = False
-
- def do_sync(self, target_name):
- if self.oauth:
- path = '~/' + target_name
- extra = dict(creds={'oauth': {
- 'consumer_key': tests.consumer1.key,
- 'consumer_secret': tests.consumer1.secret,
- 'token_key': tests.token1.key,
- 'token_secret': tests.token1.secret
- }})
- else:
- path = target_name
- extra = {}
- target_url = self.getURL(path)
- return self.db.sync(target_url, **extra)
-
- def setUp(self):
- super(TestDbSync, self).setUp()
- self.startServer()
- self.db = self.make_database_for_test(self, 'test1')
- self.db2 = self.request_state._create_database('test2.db')
-
- def test_db_sync(self):
- doc1 = self.db.create_doc_from_json(tests.simple_doc)
- doc2 = self.db2.create_doc_from_json(tests.nested_doc)
- local_gen_before_sync = self.do_sync('test2.db')
- gen, _, changes = self.db.whats_changed(local_gen_before_sync)
- self.assertEqual(1, len(changes))
- self.assertEqual(doc2.doc_id, changes[0][0])
- self.assertEqual(1, gen - local_gen_before_sync)
- self.assertGetDoc(self.db2, doc1.doc_id, doc1.rev, tests.simple_doc,
- False)
- self.assertGetDoc(self.db, doc2.doc_id, doc2.rev, tests.nested_doc,
- False)
-
- def test_db_sync_autocreate(self):
- doc1 = self.db.create_doc_from_json(tests.simple_doc)
- local_gen_before_sync = self.do_sync('test3.db')
- gen, _, changes = self.db.whats_changed(local_gen_before_sync)
- self.assertEqual(0, gen - local_gen_before_sync)
- db3 = self.request_state.open_database('test3.db')
- gen, _, changes = db3.whats_changed()
- self.assertEqual(1, len(changes))
- self.assertEqual(doc1.doc_id, changes[0][0])
- self.assertGetDoc(db3, doc1.doc_id, doc1.rev, tests.simple_doc,
- False)
- t_gen, _ = self.db._get_replica_gen_and_trans_id('test3.db')
- s_gen, _ = db3._get_replica_gen_and_trans_id('test1')
- self.assertEqual(1, t_gen)
- self.assertEqual(1, s_gen)
-
-
-class TestRemoteSyncIntegration(tests.TestCaseWithServer):
- """Integration tests for the most common sync scenario local -> remote"""
-
- make_app_with_state = staticmethod(make_http_app)
-
- def setUp(self):
- super(TestRemoteSyncIntegration, self).setUp()
- self.startServer()
- self.db1 = inmemory.InMemoryDatabase('test1')
- self.db2 = self.request_state._create_database('test2')
-
- def test_sync_tracks_generations_incrementally(self):
- doc11 = self.db1.create_doc_from_json('{"a": 1}')
- doc12 = self.db1.create_doc_from_json('{"a": 2}')
- doc21 = self.db2.create_doc_from_json('{"b": 1}')
- doc22 = self.db2.create_doc_from_json('{"b": 2}')
- #sanity
- self.assertEqual(2, len(self.db1._get_transaction_log()))
- self.assertEqual(2, len(self.db2._get_transaction_log()))
- progress1 = []
- progress2 = []
- _do_set_replica_gen_and_trans_id = \
- self.db1._do_set_replica_gen_and_trans_id
-
- def set_sync_generation_witness1(other_uid, other_gen, trans_id):
- progress1.append((other_uid, other_gen,
- [d for d, t in self.db1._get_transaction_log()[2:]]))
- _do_set_replica_gen_and_trans_id(other_uid, other_gen, trans_id)
- self.patch(self.db1, '_do_set_replica_gen_and_trans_id',
- set_sync_generation_witness1)
- _do_set_replica_gen_and_trans_id2 = \
- self.db2._do_set_replica_gen_and_trans_id
-
- def set_sync_generation_witness2(other_uid, other_gen, trans_id):
- progress2.append((other_uid, other_gen,
- [d for d, t in self.db2._get_transaction_log()[2:]]))
- _do_set_replica_gen_and_trans_id2(other_uid, other_gen, trans_id)
- self.patch(self.db2, '_do_set_replica_gen_and_trans_id',
- set_sync_generation_witness2)
-
- db2_url = self.getURL('test2')
- self.db1.sync(db2_url)
-
- self.assertEqual([('test2', 1, [doc21.doc_id]),
- ('test2', 2, [doc21.doc_id, doc22.doc_id]),
- ('test2', 4, [doc21.doc_id, doc22.doc_id])],
- progress1)
- self.assertEqual([('test1', 1, [doc11.doc_id]),
- ('test1', 2, [doc11.doc_id, doc12.doc_id]),
- ('test1', 4, [doc11.doc_id, doc12.doc_id])],
- progress2)
-
-
-load_tests = tests.load_with_scenarios
diff --git a/src/leap/soledad/u1db/tests/test_test_infrastructure.py b/src/leap/soledad/u1db/tests/test_test_infrastructure.py
deleted file mode 100644
index b79e0516..00000000
--- a/src/leap/soledad/u1db/tests/test_test_infrastructure.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# Copyright 2011 Canonical Ltd.
-#
-# This file is part of u1db.
-#
-# u1db is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License version 3
-# as published by the Free Software Foundation.
-#
-# u1db is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with u1db. If not, see <http://www.gnu.org/licenses/>.
-
-"""Tests for test infrastructure bits"""
-
-from wsgiref import simple_server
-
-from u1db import (
- tests,
- )
-
-
-class TestTestCaseWithServer(tests.TestCaseWithServer):
-
- def make_app(self):
- return "app"
-
- @staticmethod
- def server_def():
- def make_server(host_port, application):
- assert application == "app"
- return simple_server.WSGIServer(host_port, None)
- return (make_server, "shutdown", "http")
-
- def test_getURL(self):
- self.startServer()
- url = self.getURL()
- self.assertTrue(url.startswith('http://127.0.0.1:'))
diff --git a/src/leap/soledad/u1db/tests/test_vectorclock.py b/src/leap/soledad/u1db/tests/test_vectorclock.py
deleted file mode 100644
index 72baf246..00000000
--- a/src/leap/soledad/u1db/tests/test_vectorclock.py
+++ /dev/null
@@ -1,121 +0,0 @@
-# Copyright 2011 Canonical Ltd.
-#
-# This file is part of u1db.
-#
-# u1db is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License version 3
-# as published by the Free Software Foundation.
-#
-# u1db is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with u1db. If not, see <http://www.gnu.org/licenses/>.
-
-"""VectorClockRev helper class tests."""
-
-from u1db import tests, vectorclock
-
-try:
- from u1db.tests import c_backend_wrapper
-except ImportError:
- c_backend_wrapper = None
-
-
-c_vectorclock_scenarios = []
-if c_backend_wrapper is not None:
- c_vectorclock_scenarios.append(
- ('c', {'create_vcr': c_backend_wrapper.VectorClockRev}))
-
-
-class TestVectorClockRev(tests.TestCase):
-
- scenarios = [('py', {'create_vcr': vectorclock.VectorClockRev})
- ] + c_vectorclock_scenarios
-
- def assertIsNewer(self, newer_rev, older_rev):
- new_vcr = self.create_vcr(newer_rev)
- old_vcr = self.create_vcr(older_rev)
- self.assertTrue(new_vcr.is_newer(old_vcr))
- self.assertFalse(old_vcr.is_newer(new_vcr))
-
- def assertIsConflicted(self, rev_a, rev_b):
- vcr_a = self.create_vcr(rev_a)
- vcr_b = self.create_vcr(rev_b)
- self.assertFalse(vcr_a.is_newer(vcr_b))
- self.assertFalse(vcr_b.is_newer(vcr_a))
-
- def assertRoundTrips(self, rev):
- self.assertEqual(rev, self.create_vcr(rev).as_str())
-
- def test__is_newer_doc_rev(self):
- self.assertIsNewer('test:1', None)
- self.assertIsNewer('test:2', 'test:1')
- self.assertIsNewer('other:2|test:1', 'other:1|test:1')
- self.assertIsNewer('other:1|test:1', 'other:1')
- self.assertIsNewer('a:2|b:1', 'b:1')
- self.assertIsNewer('a:1|b:2', 'a:1')
- self.assertIsConflicted('other:2|test:1', 'other:1|test:2')
- self.assertIsConflicted('other:1|test:1', 'other:2')
- self.assertIsConflicted('test:1', 'test:1')
-
- def test_None(self):
- vcr = self.create_vcr(None)
- self.assertEqual('', vcr.as_str())
-
- def test_round_trips(self):
- self.assertRoundTrips('test:1')
- self.assertRoundTrips('a:1|b:2')
- self.assertRoundTrips('alternate:2|test:1')
-
- def test_handles_sort_order(self):
- self.assertEqual('a:1|b:2', self.create_vcr('b:2|a:1').as_str())
- # Last one out of place
- self.assertEqual('a:1|b:2|c:3|d:4|e:5|f:6',
- self.create_vcr('f:6|a:1|b:2|c:3|d:4|e:5').as_str())
- # Fully reversed
- self.assertEqual('a:1|b:2|c:3|d:4|e:5|f:6',
- self.create_vcr('f:6|e:5|d:4|c:3|b:2|a:1').as_str())
-
- def assertIncrement(self, original, replica_uid, after_increment):
- vcr = self.create_vcr(original)
- vcr.increment(replica_uid)
- self.assertEqual(after_increment, vcr.as_str())
-
- def test_increment(self):
- self.assertIncrement(None, 'test', 'test:1')
- self.assertIncrement('test:1', 'test', 'test:2')
-
- def test_increment_adds_uid(self):
- self.assertIncrement('other:1', 'test', 'other:1|test:1')
- self.assertIncrement('a:1|ab:2', 'aa', 'a:1|aa:1|ab:2')
-
- def test_increment_update_partial(self):
- self.assertIncrement('a:1|ab:2', 'a', 'a:2|ab:2')
- self.assertIncrement('a:2|ab:2', 'ab', 'a:2|ab:3')
-
- def test_increment_appends_uid(self):
- self.assertIncrement('b:2', 'c', 'b:2|c:1')
-
- def assertMaximize(self, rev1, rev2, maximized):
- vcr1 = self.create_vcr(rev1)
- vcr2 = self.create_vcr(rev2)
- vcr1.maximize(vcr2)
- self.assertEqual(maximized, vcr1.as_str())
- # reset vcr1 to maximize the other way
- vcr1 = self.create_vcr(rev1)
- vcr2.maximize(vcr1)
- self.assertEqual(maximized, vcr2.as_str())
-
- def test_maximize(self):
- self.assertMaximize(None, None, '')
- self.assertMaximize(None, 'x:1', 'x:1')
- self.assertMaximize('x:1', 'y:1', 'x:1|y:1')
- self.assertMaximize('x:2', 'x:1', 'x:2')
- self.assertMaximize('x:2', 'x:1|y:2', 'x:2|y:2')
- self.assertMaximize('a:1|c:2|e:3', 'b:3|d:4|f:5',
- 'a:1|b:3|c:2|d:4|e:3|f:5')
-
-load_tests = tests.load_with_scenarios
diff --git a/src/leap/soledad/u1db/tests/testing-certs/Makefile b/src/leap/soledad/u1db/tests/testing-certs/Makefile
deleted file mode 100644
index 2385e75b..00000000
--- a/src/leap/soledad/u1db/tests/testing-certs/Makefile
+++ /dev/null
@@ -1,35 +0,0 @@
-CATOP=./demoCA
-ORIG_CONF=/usr/lib/ssl/openssl.cnf
-ELEVEN_YEARS=-days 4015
-
-init:
- cp $(ORIG_CONF) ca.conf
- install -d $(CATOP)
- install -d $(CATOP)/certs
- install -d $(CATOP)/crl
- install -d $(CATOP)/newcerts
- install -d $(CATOP)/private
- touch $(CATOP)/index.txt
- echo 01>$(CATOP)/crlnumber
- @echo '**** Making CA certificate ...'
- openssl req -nodes -new \
- -newkey rsa -keyout $(CATOP)/private/cakey.pem \
- -out $(CATOP)/careq.pem \
- -multivalue-rdn \
- -subj "/C=UK/ST=-/O=u1db LOCAL TESTING ONLY, DO NO TRUST/CN=u1db testing CA"
- openssl ca -config ./ca.conf -create_serial \
- -out $(CATOP)/cacert.pem $(ELEVEN_YEARS) -batch \
- -keyfile $(CATOP)/private/cakey.pem -selfsign \
- -extensions v3_ca -infiles $(CATOP)/careq.pem
-
-pems:
- cp ./demoCA/cacert.pem .
- openssl req -new -config ca.conf \
- -multivalue-rdn \
- -subj "/O=u1db LOCAL TESTING ONLY, DO NOT TRUST/CN=localhost" \
- -nodes -keyout testing.key -out newreq.pem $(ELEVEN_YEARS)
- openssl ca -batch -config ./ca.conf $(ELEVEN_YEARS) \
- -policy policy_anything \
- -out testing.cert -infiles newreq.pem
-
-.PHONY: init pems
diff --git a/src/leap/soledad/u1db/tests/testing-certs/cacert.pem b/src/leap/soledad/u1db/tests/testing-certs/cacert.pem
deleted file mode 100644
index c019a730..00000000
--- a/src/leap/soledad/u1db/tests/testing-certs/cacert.pem
+++ /dev/null
@@ -1,58 +0,0 @@
-Certificate:
- Data:
- Version: 3 (0x2)
- Serial Number:
- e4:de:01:76:c4:78:78:7e
- Signature Algorithm: sha1WithRSAEncryption
- Issuer: C=UK, ST=-, O=u1db LOCAL TESTING ONLY, DO NO TRUST, CN=u1db testing CA
- Validity
- Not Before: May 3 11:11:11 2012 GMT
- Not After : May 1 11:11:11 2023 GMT
- Subject: C=UK, ST=-, O=u1db LOCAL TESTING ONLY, DO NO TRUST, CN=u1db testing CA
- Subject Public Key Info:
- Public Key Algorithm: rsaEncryption
- Public-Key: (1024 bit)
- Modulus:
- 00:bc:91:a5:7f:7d:37:f7:06:c7:db:5b:83:6a:6b:
- 63:c3:8b:5c:f7:84:4d:97:6d:d4:be:bf:e7:79:a8:
- c1:03:57:ec:90:d4:20:e7:02:95:d9:a6:49:e3:f9:
- 9a:ea:37:b9:b2:02:62:ab:40:d3:42:bb:4a:4e:a2:
- 47:71:0f:1d:a2:c5:94:a1:cf:35:d3:23:32:42:c0:
- 1e:8d:cb:08:58:fb:8a:5c:3e:ea:eb:d5:2c:ed:d6:
- aa:09:b4:b5:7d:e3:45:c9:ae:c2:82:b2:ae:c0:81:
- bc:24:06:65:a9:e7:e0:61:ac:25:ee:53:d3:d7:be:
- 22:f7:00:a2:ad:c6:0e:3a:39
- Exponent: 65537 (0x10001)
- X509v3 extensions:
- X509v3 Subject Key Identifier:
- DB:3D:93:51:6C:32:15:54:8F:10:50:FC:49:4F:36:15:28:BB:95:6D
- X509v3 Authority Key Identifier:
- keyid:DB:3D:93:51:6C:32:15:54:8F:10:50:FC:49:4F:36:15:28:BB:95:6D
-
- X509v3 Basic Constraints:
- CA:TRUE
- Signature Algorithm: sha1WithRSAEncryption
- 72:9b:c1:f7:07:65:83:36:25:4e:01:2f:b7:4a:f2:a4:00:28:
- 80:c7:56:2c:32:39:90:13:61:4b:bb:12:c5:44:9d:42:57:85:
- 28:19:70:69:e1:43:c8:bd:11:f6:94:df:91:2d:c3:ea:82:8d:
- b4:8f:5d:47:a3:00:99:53:29:93:27:6c:c5:da:c1:20:6f:ab:
- ec:4a:be:34:f3:8f:02:e5:0c:c0:03:ac:2b:33:41:71:4f:0a:
- 72:5a:b4:26:1a:7f:81:bc:c0:95:8a:06:87:a8:11:9f:5c:73:
- 38:df:5a:69:40:21:29:ad:46:23:56:75:e1:e9:8b:10:18:4c:
- 7b:54
------BEGIN CERTIFICATE-----
-MIICkjCCAfugAwIBAgIJAOTeAXbEeHh+MA0GCSqGSIb3DQEBBQUAMGIxCzAJBgNV
-BAYTAlVLMQowCAYDVQQIDAEtMS0wKwYDVQQKDCR1MWRiIExPQ0FMIFRFU1RJTkcg
-T05MWSwgRE8gTk8gVFJVU1QxGDAWBgNVBAMMD3UxZGIgdGVzdGluZyBDQTAeFw0x
-MjA1MDMxMTExMTFaFw0yMzA1MDExMTExMTFaMGIxCzAJBgNVBAYTAlVLMQowCAYD
-VQQIDAEtMS0wKwYDVQQKDCR1MWRiIExPQ0FMIFRFU1RJTkcgT05MWSwgRE8gTk8g
-VFJVU1QxGDAWBgNVBAMMD3UxZGIgdGVzdGluZyBDQTCBnzANBgkqhkiG9w0BAQEF
-AAOBjQAwgYkCgYEAvJGlf3039wbH21uDamtjw4tc94RNl23Uvr/neajBA1fskNQg
-5wKV2aZJ4/ma6je5sgJiq0DTQrtKTqJHcQ8dosWUoc810yMyQsAejcsIWPuKXD7q
-69Us7daqCbS1feNFya7CgrKuwIG8JAZlqefgYawl7lPT174i9wCircYOOjkCAwEA
-AaNQME4wHQYDVR0OBBYEFNs9k1FsMhVUjxBQ/ElPNhUou5VtMB8GA1UdIwQYMBaA
-FNs9k1FsMhVUjxBQ/ElPNhUou5VtMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEF
-BQADgYEAcpvB9wdlgzYlTgEvt0rypAAogMdWLDI5kBNhS7sSxUSdQleFKBlwaeFD
-yL0R9pTfkS3D6oKNtI9dR6MAmVMpkydsxdrBIG+r7Eq+NPOPAuUMwAOsKzNBcU8K
-clq0Jhp/gbzAlYoGh6gRn1xzON9aaUAhKa1GI1Z14emLEBhMe1Q=
------END CERTIFICATE-----
diff --git a/src/leap/soledad/u1db/tests/testing-certs/testing.cert b/src/leap/soledad/u1db/tests/testing-certs/testing.cert
deleted file mode 100644
index 985684fb..00000000
--- a/src/leap/soledad/u1db/tests/testing-certs/testing.cert
+++ /dev/null
@@ -1,61 +0,0 @@
-Certificate:
- Data:
- Version: 3 (0x2)
- Serial Number:
- e4:de:01:76:c4:78:78:7f
- Signature Algorithm: sha1WithRSAEncryption
- Issuer: C=UK, ST=-, O=u1db LOCAL TESTING ONLY, DO NO TRUST, CN=u1db testing CA
- Validity
- Not Before: May 3 11:11:14 2012 GMT
- Not After : May 1 11:11:14 2023 GMT
- Subject: O=u1db LOCAL TESTING ONLY, DO NOT TRUST, CN=localhost
- Subject Public Key Info:
- Public Key Algorithm: rsaEncryption
- Public-Key: (1024 bit)
- Modulus:
- 00:c6:1d:72:d3:c5:e4:fc:d1:4c:d9:e4:08:3e:90:
- 10:ce:3f:1f:87:4a:1d:4f:7f:2a:5a:52:c9:65:4f:
- d9:2c:bf:69:75:18:1a:b5:c9:09:32:00:47:f5:60:
- aa:c6:dd:3a:87:37:5f:16:be:de:29:b5:ea:fc:41:
- 7e:eb:77:bb:df:63:c3:06:1e:ed:e9:a0:67:1a:f1:
- ec:e1:9d:f7:9c:8f:1c:fa:c3:66:7b:39:dc:70:ae:
- 09:1b:9c:c0:9a:c4:90:77:45:8e:39:95:a9:2f:92:
- 43:bd:27:07:5a:99:51:6e:76:a0:af:dd:b1:2c:8f:
- ca:8b:8c:47:0d:f6:6e:fc:69
- Exponent: 65537 (0x10001)
- X509v3 extensions:
- X509v3 Basic Constraints:
- CA:FALSE
- Netscape Comment:
- OpenSSL Generated Certificate
- X509v3 Subject Key Identifier:
- 1C:63:85:E1:1D:F3:89:2E:6C:4E:3F:FB:D0:10:64:5A:C1:22:6A:2A
- X509v3 Authority Key Identifier:
- keyid:DB:3D:93:51:6C:32:15:54:8F:10:50:FC:49:4F:36:15:28:BB:95:6D
-
- Signature Algorithm: sha1WithRSAEncryption
- 1d:6d:3e:bd:93:fd:bd:3e:17:b8:9f:f0:99:7f:db:50:5c:b2:
- 01:42:03:b5:d5:94:05:d3:f6:8e:80:82:55:47:1f:58:f2:18:
- 6c:ab:ef:43:2c:2f:10:e1:7c:c4:5c:cc:ac:50:50:22:42:aa:
- 35:33:f5:b9:f3:a6:66:55:d9:36:f4:f2:e4:d4:d9:b5:2c:52:
- 66:d4:21:17:97:22:b8:9b:d7:0e:7c:3d:ce:85:19:ca:c4:d2:
- 58:62:31:c6:18:3e:44:fc:f4:30:b6:95:87:ee:21:4a:08:f0:
- af:3c:8f:c4:ba:5e:a1:5c:37:1a:7d:7b:fe:66:ae:62:50:17:
- 31:ca
------BEGIN CERTIFICATE-----
-MIICnzCCAgigAwIBAgIJAOTeAXbEeHh/MA0GCSqGSIb3DQEBBQUAMGIxCzAJBgNV
-BAYTAlVLMQowCAYDVQQIDAEtMS0wKwYDVQQKDCR1MWRiIExPQ0FMIFRFU1RJTkcg
-T05MWSwgRE8gTk8gVFJVU1QxGDAWBgNVBAMMD3UxZGIgdGVzdGluZyBDQTAeFw0x
-MjA1MDMxMTExMTRaFw0yMzA1MDExMTExMTRaMEQxLjAsBgNVBAoMJXUxZGIgTE9D
-QUwgVEVTVElORyBPTkxZLCBETyBOT1QgVFJVU1QxEjAQBgNVBAMMCWxvY2FsaG9z
-dDCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAxh1y08Xk/NFM2eQIPpAQzj8f
-h0odT38qWlLJZU/ZLL9pdRgatckJMgBH9WCqxt06hzdfFr7eKbXq/EF+63e732PD
-Bh7t6aBnGvHs4Z33nI8c+sNmeznccK4JG5zAmsSQd0WOOZWpL5JDvScHWplRbnag
-r92xLI/Ki4xHDfZu/GkCAwEAAaN7MHkwCQYDVR0TBAIwADAsBglghkgBhvhCAQ0E
-HxYdT3BlblNTTCBHZW5lcmF0ZWQgQ2VydGlmaWNhdGUwHQYDVR0OBBYEFBxjheEd
-84kubE4/+9AQZFrBImoqMB8GA1UdIwQYMBaAFNs9k1FsMhVUjxBQ/ElPNhUou5Vt
-MA0GCSqGSIb3DQEBBQUAA4GBAB1tPr2T/b0+F7if8Jl/21BcsgFCA7XVlAXT9o6A
-glVHH1jyGGyr70MsLxDhfMRczKxQUCJCqjUz9bnzpmZV2Tb08uTU2bUsUmbUIReX
-Irib1w58Pc6FGcrE0lhiMcYYPkT89DC2lYfuIUoI8K88j8S6XqFcNxp9e/5mrmJQ
-FzHK
------END CERTIFICATE-----
diff --git a/src/leap/soledad/u1db/tests/testing-certs/testing.key b/src/leap/soledad/u1db/tests/testing-certs/testing.key
deleted file mode 100644
index d83d4920..00000000
--- a/src/leap/soledad/u1db/tests/testing-certs/testing.key
+++ /dev/null
@@ -1,16 +0,0 @@
------BEGIN PRIVATE KEY-----
-MIICdgIBADANBgkqhkiG9w0BAQEFAASCAmAwggJcAgEAAoGBAMYdctPF5PzRTNnk
-CD6QEM4/H4dKHU9/KlpSyWVP2Sy/aXUYGrXJCTIAR/VgqsbdOoc3Xxa+3im16vxB
-fut3u99jwwYe7emgZxrx7OGd95yPHPrDZns53HCuCRucwJrEkHdFjjmVqS+SQ70n
-B1qZUW52oK/dsSyPyouMRw32bvxpAgMBAAECgYBs3lXxhjg1rhabTjIxnx19GTcM
-M3Az9V+izweZQu3HJ1CeZiaXauhAr+LbNsniCkRVddotN6oCJdQB10QVxXBZc9Jz
-HPJ4zxtZfRZlNMTMmG7eLWrfxpgWnb/BUjDb40yy1nhr9yhDUnI/8RoHDRHnAEHZ
-/CnHGUrqcVcrY5zJAQJBAPLhBJg9W88JVmcOKdWxRgs7dLHnZb999Kv1V5mczmAi
-jvGvbUmucqOqke6pTUHNYyNHqU6pySzGUi2cH+BAkFECQQDQ0VoAOysg6FVoT15v
-tGh57t5sTiCZZ7PS8jwvtThsgA+vcf6c16XWzXgjGXSap4r2QDOY2rI5lsWLaQ8T
-+fyZAkAfyFJRmbXp4c7srW3MCOahkaYzoZQu+syJtBFCiMJ40gzik5I5khpuUGPI
-V19EvRu8AiSlppIsycb3MPb64XgBAkEAy7DrUf5le5wmc7G4NM6OeyJ+5LbxJbL6
-vnJ8My1a9LuWkVVpQCU7J+UVo2dZTuLPspW9vwTVhUeFOxAoHRxlQQJAFem93f7m
-el2BkB2EFqU3onPejkZ5UrDmfmeOQR1axMQNSXqSxcJxqa16Ru1BWV2gcWRbwajQ
-oc+kuJThu/r/Ug==
------END PRIVATE KEY-----