author     Tomás Touceda <chiiph@leap.se>    2013-03-06 15:27:23 -0300
committer  Tomás Touceda <chiiph@leap.se>    2013-03-06 15:27:23 -0300
commit     5ff29dc57e2877a14e705d09b7042cddf4165d0a (patch)
tree       18e5817a105f0aaa7a9a752f7b644e44a6c867bc
parent     e23553caaf93a734578b02f9130dee38161d0e22 (diff)
Remove everything to start from scratch
-rw-r--r--  MANIFEST.in | 4
-rw-r--r--  Makefile | 69
-rw-r--r--  NEWS.rst | 69
-rw-r--r--  README.rst | 120
-rw-r--r--  data/images/Dialog-accept.png | bin 1316 -> 0 bytes
-rw-r--r--  data/images/Dialog-error.png | bin 1380 -> 0 bytes
-rw-r--r--  data/images/Emblem-question.png | bin 1260 -> 0 bytes
-rw-r--r--  data/images/conn_connected.png | bin 426 -> 0 bytes
-rw-r--r--  data/images/conn_connecting.png | bin 712 -> 0 bytes
-rw-r--r--  data/images/conn_error.png | bin 1429 -> 0 bytes
-rw-r--r--  data/images/favicon.ico | bin 318 -> 0 bytes
-rw-r--r--  data/images/leap-client.icns | bin 27456 -> 0 bytes
-rw-r--r--  data/images/leap-color-small.png | bin 10100 -> 0 bytes
-rw-r--r--  data/leap_client.pro | 25
-rwxr-xr-x  data/mkpyqt.py | 271
-rw-r--r--  data/resources/locale.qrc | 6
-rw-r--r--  data/resources/mainwindow.qrc | 11
-rw-r--r--  data/translations/README.rst | 8
-rw-r--r--  data/translations/de.qm | bin 6036 -> 0 bytes
-rw-r--r--  data/translations/de.ts | 218
-rw-r--r--  data/translations/es.qm | bin 6194 -> 0 bytes
-rw-r--r--  data/translations/es.ts | 218
-rw-r--r--  data/ts/README.rst | 14
-rw-r--r--  data/ts/en_US.ts | 477
-rw-r--r--  docs/COPYING | 674
-rw-r--r--  docs/Makefile | 153
-rw-r--r--  docs/api/leap.base.rst | 98
-rw-r--r--  docs/api/leap.base.tests.rst | 43
-rw-r--r--  docs/api/leap.baseapp.rst | 75
-rw-r--r--  docs/api/leap.certs.rst | 11
-rw-r--r--  docs/api/leap.crypto.rst | 26
-rw-r--r--  docs/api/leap.crypto.tests.rst | 11
-rw-r--r--  docs/api/leap.eip.rst | 74
-rw-r--r--  docs/api/leap.eip.tests.rst | 43
-rw-r--r--  docs/api/leap.gui.firstrun.rst | 91
-rw-r--r--  docs/api/leap.gui.rst | 75
-rw-r--r--  docs/api/leap.gui.tests.rst | 59
-rw-r--r--  docs/api/leap.rst | 41
-rw-r--r--  docs/api/leap.testing.rst | 43
-rw-r--r--  docs/api/leap.util.rst | 66
-rw-r--r--  docs/api/leap.util.tests.rst | 19
-rw-r--r--  docs/api/modules.rst | 7
-rw-r--r--  docs/conf.py | 242
-rw-r--r--  docs/config/files.rst | 16
-rw-r--r--  docs/dev/authors.rst | 0
-rw-r--r--  docs/dev/environment.rst | 121
-rw-r--r--  docs/dev/internals.rst | 12
-rw-r--r--  docs/dev/internationalization.rst | 117
-rw-r--r--  docs/dev/resources.rst | 14
-rw-r--r--  docs/dev/tests.rst | 62
-rw-r--r--  docs/dev/todo.rst | 0
-rw-r--r--  docs/dev/workflow.rst | 41
-rw-r--r--  docs/index.rst | 92
-rw-r--r--  docs/make.bat | 190
-rw-r--r--  docs/man/leap.1 | 83
-rw-r--r--  docs/man/leap.1.rst | 86
-rw-r--r--  docs/pkg/debian.rst | 28
-rw-r--r--  docs/pkg/osx.rst | 0
-rw-r--r--  docs/pkg/win.rst | 0
-rw-r--r--  docs/testers/howto.rst | 111
-rw-r--r--  docs/user/gpl.png | bin 3471 -> 0 bytes
-rw-r--r--  docs/user/install.rst | 47
-rw-r--r--  docs/user/intro.rst | 101
-rw-r--r--  docs/user/running.rst | 40
-rw-r--r--  openvpn/README | 6
-rw-r--r--  openvpn/Sources | 4
-rwxr-xr-x  openvpn/build.zsh | 191
-rw-r--r--  pkg/__init__.py | 0
-rw-r--r--  pkg/branding/__init__.py | 15
-rw-r--r--  pkg/branding/config.py | 11
-rw-r--r--  pkg/dev-reqs.pip | 4
-rw-r--r--  pkg/distribute_setup.py | 515
-rwxr-xr-x  pkg/install_pyqt.sh | 10
-rw-r--r--  pkg/install_venv.py | 247
-rw-r--r--  pkg/linux/README | 4
-rw-r--r--  pkg/linux/leap.desktop | 13
-rw-r--r--  pkg/linux/polkit/net.openvpn.gui.leap.policy | 23
-rwxr-xr-x  pkg/linux/resolv-update | 90
-rw-r--r--  pkg/osx/Info.plist | 22
-rw-r--r--  pkg/osx/Makefile | 46
-rw-r--r--  pkg/osx/README.rst | 60
-rw-r--r--  pkg/osx/install/ProcessNetworkChanges.plist.template | 16
-rwxr-xr-x  pkg/osx/install/client.down.sh | 146
-rwxr-xr-x  pkg/osx/install/client.up.sh | 596
-rwxr-xr-x  pkg/osx/install/install-leapc.sh | 17
-rw-r--r--  pkg/osx/install/leap-installer.platypus | 90
-rw-r--r--  pkg/osx/leap-client.spec | 36
-rwxr-xr-x  pkg/postmkvenv.sh | 40
-rw-r--r--  pkg/requirements.pip | 33
-rw-r--r--  pkg/scripts/leap_client_bootstrap.sh | 50
-rw-r--r--  pkg/test-requirements.pip | 14
-rwxr-xr-x  pkg/tools/with_venv.sh | 4
-rw-r--r--  pkg/utils.py | 42
-rwxr-xr-x  run_tests.sh | 154
-rw-r--r--  setup.cfg | 2
-rwxr-xr-x  setup.py | 222
-rw-r--r--  src/leap/__init__.py | 36
-rw-r--r--  src/leap/_version.py | 197
-rw-r--r--  src/leap/app.py | 120
-rw-r--r--  src/leap/base/__init__.py | 0
-rw-r--r--  src/leap/base/auth.py | 355
-rw-r--r--  src/leap/base/authentication.py | 11
-rw-r--r--  src/leap/base/checks.py | 213
-rw-r--r--  src/leap/base/config.py | 348
-rw-r--r--  src/leap/base/connection.py | 115
-rw-r--r--  src/leap/base/constants.py | 42
-rw-r--r--  src/leap/base/exceptions.py | 97
-rw-r--r--  src/leap/base/network.py | 107
-rw-r--r--  src/leap/base/pluggableconfig.py | 455
-rw-r--r--  src/leap/base/providers.py | 29
-rw-r--r--  src/leap/base/specs.py | 67
-rw-r--r--  src/leap/base/tests/__init__.py | 0
-rw-r--r--  src/leap/base/tests/test_auth.py | 58
-rw-r--r--  src/leap/base/tests/test_checks.py | 177
-rw-r--r--  src/leap/base/tests/test_config.py | 247
-rw-r--r--  src/leap/base/tests/test_providers.py | 150
-rw-r--r--  src/leap/base/tests/test_validation.py | 92
-rw-r--r--  src/leap/baseapp/__init__.py | 0
-rw-r--r--  src/leap/baseapp/constants.py | 6
-rw-r--r--  src/leap/baseapp/dialogs.py | 61
-rw-r--r--  src/leap/baseapp/eip.py | 243
-rw-r--r--  src/leap/baseapp/leap_app.py | 153
-rw-r--r--  src/leap/baseapp/log.py | 69
-rw-r--r--  src/leap/baseapp/mainwindow.py | 191
-rw-r--r--  src/leap/baseapp/network.py | 63
-rw-r--r--  src/leap/baseapp/permcheck.py | 17
-rw-r--r--  src/leap/baseapp/systray.py | 268
-rw-r--r--  src/leap/certs/__init__.py | 7
-rw-r--r--  src/leap/crypto/__init__.py | 0
-rw-r--r--  src/leap/crypto/certs.py | 112
-rw-r--r--  src/leap/crypto/certs_gnutls.py | 112
-rw-r--r--  src/leap/crypto/leapkeyring.py | 70
-rw-r--r--  src/leap/crypto/tests/__init__.py | 0
-rw-r--r--  src/leap/crypto/tests/test_certs.py | 22
-rw-r--r--  src/leap/eip/__init__.py | 0
-rw-r--r--  src/leap/eip/checks.py | 537
-rw-r--r--  src/leap/eip/config.py | 398
-rw-r--r--  src/leap/eip/constants.py | 3
-rw-r--r--  src/leap/eip/eipconnection.py | 405
-rw-r--r--  src/leap/eip/exceptions.py | 175
-rw-r--r--  src/leap/eip/openvpnconnection.py | 410
-rw-r--r--  src/leap/eip/specs.py | 136
-rw-r--r--  src/leap/eip/tests/__init__.py | 0
-rw-r--r--  src/leap/eip/tests/data.py | 51
-rw-r--r--  src/leap/eip/tests/test_checks.py | 373
-rw-r--r--  src/leap/eip/tests/test_config.py | 298
-rw-r--r--  src/leap/eip/tests/test_eipconnection.py | 216
-rw-r--r--  src/leap/eip/tests/test_openvpnconnection.py | 161
-rw-r--r--  src/leap/eip/udstelnet.py | 38
-rw-r--r--  src/leap/email/__init__.py | 0
-rw-r--r--  src/leap/email/smtp/README | 43
-rw-r--r--  src/leap/email/smtp/__init__.py | 0
-rw-r--r--  src/leap/email/smtp/smtprelay.py | 207
-rw-r--r--  src/leap/email/smtp/tests/185CA770.key | 79
-rw-r--r--  src/leap/email/smtp/tests/185CA770.pub | 52
-rw-r--r--  src/leap/email/smtp/tests/__init__.py | 215
-rw-r--r--  src/leap/email/smtp/tests/mail.txt | 10
-rw-r--r--  src/leap/email/smtp/tests/test_smtprelay.py | 75
-rw-r--r--  src/leap/gui/__init__.py | 11
-rw-r--r--  src/leap/gui/constants.py | 13
-rw-r--r--  src/leap/gui/firstrun/__init__.py | 28
-rw-r--r--  src/leap/gui/firstrun/connect.py | 214
-rw-r--r--  src/leap/gui/firstrun/constants.py | 0
-rw-r--r--  src/leap/gui/firstrun/intro.py | 68
-rw-r--r--  src/leap/gui/firstrun/last.py | 119
-rw-r--r--  src/leap/gui/firstrun/login.py | 332
-rw-r--r--  src/leap/gui/firstrun/mixins.py | 18
-rw-r--r--  src/leap/gui/firstrun/providerinfo.py | 106
-rw-r--r--  src/leap/gui/firstrun/providerselect.py | 471
-rw-r--r--  src/leap/gui/firstrun/providersetup.py | 157
-rw-r--r--  src/leap/gui/firstrun/register.py | 387
-rwxr-xr-x  src/leap/gui/firstrun/tests/integration/fake_provider.py | 302
-rwxr-xr-x  src/leap/gui/firstrun/wizard.py | 309
-rw-r--r--  src/leap/gui/locale_rc.py | 813
-rw-r--r--  src/leap/gui/mainwindow_rc.py | 1130
-rw-r--r--  src/leap/gui/progress.py | 488
-rw-r--r--  src/leap/gui/styles.py | 16
-rw-r--r--  src/leap/gui/tests/__init__.py | 0
-rw-r--r--  src/leap/gui/tests/integration/fake_user_signup.py | 84
-rw-r--r--  src/leap/gui/tests/test_firstrun_login.py | 212
-rw-r--r--  src/leap/gui/tests/test_firstrun_providerselect.py | 203
-rw-r--r--  src/leap/gui/tests/test_firstrun_register.py | 244
-rw-r--r--  src/leap/gui/tests/test_firstrun_wizard.py | 137
-rw-r--r--  src/leap/gui/tests/test_mainwindow_rc.py | 32
-rw-r--r--  src/leap/gui/tests/test_progress.py | 449
-rw-r--r--  src/leap/gui/tests/test_threads.py | 27
-rw-r--r--  src/leap/gui/threads.py | 21
-rw-r--r--  src/leap/gui/utils.py | 34
-rw-r--r--  src/leap/soledad/README | 35
-rw-r--r--  src/leap/soledad/__init__.py | 221
-rw-r--r--  src/leap/soledad/backends/__init__.py | 5
-rw-r--r--  src/leap/soledad/backends/couch.py | 269
-rw-r--r--  src/leap/soledad/backends/leap_backend.py | 193
-rw-r--r--  src/leap/soledad/backends/objectstore.py | 114
-rw-r--r--  src/leap/soledad/backends/sqlcipher.py | 176
-rw-r--r--  src/leap/soledad/server.py | 20
-rw-r--r--  src/leap/soledad/tests/__init__.py | 199
-rw-r--r--  src/leap/soledad/tests/couchdb.ini.template | 222
-rw-r--r--  src/leap/soledad/tests/test_couch.py | 298
-rw-r--r--  src/leap/soledad/tests/test_encrypted.py | 15
-rw-r--r--  src/leap/soledad/tests/test_leap_backend.py | 207
-rw-r--r--  src/leap/soledad/tests/test_sqlcipher.py | 501
-rw-r--r--  src/leap/soledad/tests/u1db_tests/README | 34
-rw-r--r--  src/leap/soledad/tests/u1db_tests/__init__.py | 421
-rw-r--r--  src/leap/soledad/tests/u1db_tests/test_backends.py | 1907
-rw-r--r--  src/leap/soledad/tests/u1db_tests/test_document.py | 150
-rw-r--r--  src/leap/soledad/tests/u1db_tests/test_http_app.py | 1135
-rw-r--r--  src/leap/soledad/tests/u1db_tests/test_http_client.py | 363
-rw-r--r--  src/leap/soledad/tests/u1db_tests/test_http_database.py | 260
-rw-r--r--  src/leap/soledad/tests/u1db_tests/test_https.py | 117
-rw-r--r--  src/leap/soledad/tests/u1db_tests/test_open.py | 69
-rw-r--r--  src/leap/soledad/tests/u1db_tests/test_remote_sync_target.py | 317
-rw-r--r--  src/leap/soledad/tests/u1db_tests/test_sqlite_backend.py | 494
-rw-r--r--  src/leap/soledad/tests/u1db_tests/test_sync.py | 1242
-rw-r--r--  src/leap/soledad/tests/u1db_tests/testing-certs/Makefile | 35
-rw-r--r--  src/leap/soledad/tests/u1db_tests/testing-certs/cacert.pem | 58
-rw-r--r--  src/leap/soledad/tests/u1db_tests/testing-certs/testing.cert | 61
-rw-r--r--  src/leap/soledad/tests/u1db_tests/testing-certs/testing.key | 16
-rw-r--r--  src/leap/soledad/util.py | 55
-rw-r--r--  src/leap/testing/__init__.py | 0
-rw-r--r--  src/leap/testing/basetest.py | 85
-rw-r--r--  src/leap/testing/cacert.pem | 23
-rw-r--r--  src/leap/testing/https_server.py | 68
-rw-r--r--  src/leap/testing/leaptestscert.pem | 84
-rw-r--r--  src/leap/testing/leaptestskey.pem | 27
-rw-r--r--  src/leap/testing/pyqt.py | 52
-rw-r--r--  src/leap/testing/qunittest.py | 302
-rw-r--r--  src/leap/testing/test_basetest.py | 91
-rw-r--r--  src/leap/util/__init__.py | 9
-rw-r--r--  src/leap/util/certs.py | 18
-rw-r--r--  src/leap/util/coroutines.py | 109
-rw-r--r--  src/leap/util/dicts.py | 268
-rw-r--r--  src/leap/util/fileutil.py | 120
-rw-r--r--  src/leap/util/geo.py | 32
-rw-r--r--  src/leap/util/leap_argparse.py | 44
-rw-r--r--  src/leap/util/misc.py | 37
-rw-r--r--  src/leap/util/tests/__init__.py | 0
-rw-r--r--  src/leap/util/tests/test_fileutil.py | 100
-rw-r--r--  src/leap/util/tests/test_leap_argparse.py | 35
-rw-r--r--  src/leap/util/tests/test_translations.py | 22
-rw-r--r--  src/leap/util/translations.py | 82
-rw-r--r--  src/leap/util/web.py | 40
-rw-r--r--  tests/README | 1
-rw-r--r--  tests/__init__.py | 0
-rw-r--r--  tests/test_qt_environment.py | 43
-rw-r--r--  tox.ini | 12
-rw-r--r--  versioneer.py | 656
247 files changed, 0 insertions, 33431 deletions
diff --git a/MANIFEST.in b/MANIFEST.in
deleted file mode 100644
index d7a5201e..00000000
--- a/MANIFEST.in
+++ /dev/null
@@ -1,4 +0,0 @@
-include pkg/*
-include pkg/branding/*
-include docs/*
-include versioneer.py
diff --git a/Makefile b/Makefile
deleted file mode 100644
index cfcd47a1..00000000
--- a/Makefile
+++ /dev/null
@@ -1,69 +0,0 @@
-SHELL := /bin/zsh
-# ################################
-# Makefile for compiling resources
-# files.
-# TODO move to setup scripts
-# and implement it in python
-# http://die-offenbachs.homelinux.org:48888/hg/eric5/file/5072605ad4dd/compileUiFiles.py
-###### EDIT ######################
-
-#Directory with ui and resource files
-RESOURCE_DIR = data/resources
-
-#Directory for compiled resources
-COMPILED_DIR = src/leap/gui
-
-#Directory for (finished) translations
-TRANSLAT_DIR = data/translations
-
-#Project file, used for translations
-PROJFILE = data/leap_client.pro
-
-#UI files to compile
-# UI_FILES = foo.ui
-UI_FILES =
-#Qt resource files to compile
-#images.qrc
-RESOURCES = mainwindow.qrc locale.qrc
-
-#pyuic4 and pyrcc4 binaries
-PYUIC = pyuic4
-PYRCC = pyrcc4
-PYLUP = pylupdate4
-LRELE = lrelease
-
-
-#################################
-# DO NOT EDIT FOLLOWING
-
-COMPILED_UI = $(UI_FILES:%.ui=$(COMPILED_DIR)/ui_%.py)
-COMPILED_RESOURCES = $(RESOURCES:%.qrc=$(COMPILED_DIR)/%_rc.py)
-
-DEBVER = $(shell dpkg-parsechangelog | sed -ne 's,Version: ,,p')
-
-#
-
-all : resources ui
-
-resources : $(COMPILED_RESOURCES)
-
-ui : $(COMPILED_UI)
-
-translations:
- $(PYLUP) $(PROJFILE)
- $(LRELE) $(TRANSLAT_DIR)/*.ts
-
-$(COMPILED_DIR)/ui_%.py : $(RESOURCE_DIR)/%.ui
- $(PYUIC) $< -o $@
-
-$(COMPILED_DIR)/%_rc.py : $(RESOURCE_DIR)/%.qrc
- $(PYRCC) $< -o $@
-
-manpages:
- rst2man docs/man/leap.1.rst docs/man/leap.1
-
-apidocs:
- @sphinx-apidoc -o docs/api src/leap
-
-clean :
- $(RM) $(COMPILED_UI) $(COMPILED_RESOURCES) $(COMPILED_UI:.py=.pyc) $(COMPILED_RESOURCES:.py=.pyc)
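
The Makefile above carries a TODO to move this resource compilation into the setup scripts and implement it in Python. A minimal sketch of that idea, assuming the same pyuic4/pyrcc4 tools on the PATH and the layout given by the Makefile variables; the script name and structure are illustrative, not something that existed in the tree::

    #!/usr/bin/env python
    # Hypothetical sketch: compile Qt resource/ui files the way the
    # Makefile above does, using the same directories and naming scheme.
    import os
    import subprocess

    RESOURCE_DIR = "data/resources"
    COMPILED_DIR = "src/leap/gui"
    RESOURCES = ["mainwindow.qrc", "locale.qrc"]
    UI_FILES = []  # currently empty, as in the Makefile

    def compile_resources():
        for qrc in RESOURCES:
            target = os.path.join(COMPILED_DIR, qrc.replace(".qrc", "_rc.py"))
            subprocess.check_call(["pyrcc4", os.path.join(RESOURCE_DIR, qrc), "-o", target])
        for ui in UI_FILES:
            target = os.path.join(COMPILED_DIR, "ui_" + ui.replace(".ui", ".py"))
            subprocess.check_call(["pyuic4", os.path.join(RESOURCE_DIR, ui), "-o", target])

    if __name__ == "__main__":
        compile_resources()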
diff --git a/NEWS.rst b/NEWS.rst
deleted file mode 100644
index 00350cbb..00000000
--- a/NEWS.rst
+++ /dev/null
@@ -1,69 +0,0 @@
-==================================
-User-facing changes in Leap Client
-==================================
-
-Release 0.2.0 (2012-10-XX)
---------------------------
-
-This release is a functionally working version in Debian Squeeze and Ubuntu 12.04.
-It is able to connect to a preconfigured LEAP provider and autoconfigures an EIP connection.
-
-Python Support
-''''''''''''''
-This release supports Python 2.6 and Python 2.7.
-
-New Features
-''''''''''''
-- Branded build: the final package includes branding info needed to connect to a default provider.
-- First run wizard: allows registering a user with the selected provider. It also downloads all
- the config files needed to connect to the EIP service on this provider.
-- Network checks: we do some basic network testing and warn the user in case we cannot find a
- suitable network interface, or if the virtual interface disappears after a successful EIP connection.
-- Debug mode and logfiles: the leap-client script can be invoked with the --debug flag.
- It also accepts a --logfile option that is useful for documenting bug reports.
-
-Dependencies
-''''''''''''
-See the ``README.rst`` for a step-by-step install guide.
-
-The following libraries are needed:
-
-- PyQt4
-- libgnutls
-- openvpn
-
-For building the package dependencies, you will also need:
-
-- python-setuptools
-- python-dev
-- libgnutls-dev
-
-Leap-Client depends on the following python packages:
-
-- requests
-- ping
-- psutil
-- netifaces
-- jsonschema
-- srp
-- pycrypto
-- keyring
-- python-gnutls==1.1.9
-
-We are freezing the python-gnutls library dependency for this release due to a bug in Ubuntu; see:
-https://bugs.launchpad.net/ubuntu/+source/python-gnutls/+bug/1027129
-
-
-Configuration files
-'''''''''''''''''''
-
-Config files are created under ``~/.config/leap``.
-Currently the user should be able to completely remove this folder and have it auto-generated on the first run.
-
-- Current EIP service config is stored in ``eip.json``.
-- Under ``.config/leap/providers``, there is a per-provider folder that contains:
-  - ``provider.json``, with all options for connecting to this provider.
-  - ``eip-service.json``, with EIP-specific configuration options,
-  - ``keys/ca``, for a copy of the CA certificates used in the TLS connections to the provider.
-  - ``keys/client``, for a local copy of the LEAP user certificates used in the EIP connection.
-- ``leap.conf`` for general application configurations (GUI window geometry, ...).
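
A minimal sketch of the per-provider layout described above, built only from the paths listed in this NEWS entry; the function and variable names are illustrative and not part of the client API::

    # Hypothetical helper mirroring the documented ~/.config/leap layout.
    import os

    LEAP_CONFIG_DIR = os.path.expanduser("~/.config/leap")

    def provider_paths(domain):
        """Expected config paths for a single provider domain."""
        base = os.path.join(LEAP_CONFIG_DIR, "providers", domain)
        return {
            "provider": os.path.join(base, "provider.json"),
            "eip_service": os.path.join(base, "eip-service.json"),
            "ca_certs": os.path.join(base, "keys", "ca"),
            "client_certs": os.path.join(base, "keys", "client"),
        }

    # e.g. provider_paths("example.org")["eip_service"]
    #   -> ~/.config/leap/providers/example.org/eip-service.json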
diff --git a/README.rst b/README.rst
deleted file mode 100644
index 9ef3f99b..00000000
--- a/README.rst
+++ /dev/null
@@ -1,120 +0,0 @@
-=========================================
-The LEAP Encryption Access Project Client
-=========================================
-
-*your internet encryption toolkit*
-
-Read the docs!
-==============
-
-You can read the documentation online at `http://leap-client.readthedocs.org <http://leap-client.readthedocs.org/en/latest/>`_. If you prefer to build it locally, run::
-
- $ cd docs
- $ make html
-
-Quick Start
-==============
-
-At the current development stage we still do not have any versioned release. Instead, you might want to have a look at the `testers guide <http://leap-client.readthedocs.org/en/latest/testers/howto.html>`_ for a quick howto on fetching and testing latest development code.
-
-Dependencies
-------------------
-
-LEAP Client depends on these libraries:
-
-* ``python 2.6`` or ``2.7``
-* ``qt4 libraries``
-* ``libgnutls``
-* ``openvpn``
-
-Python packages are listed in ``pkg/requirements.pip`` and ``pkg/test-requirements.pip``
-
-Debian
-^^^^^^
-
-Under a debian-based system, you can run::
-
- $ apt-get install openvpn python-qt4 python-crypto python-requests python-gnutls
-
-For *testing*::
-
- $ apt-get install python-nose python-mock python-coverage
-
-For *building* the package you will also need to install::
-
- $ apt-get install pyqt4-dev-tools libgnutls-dev python-setuptools python-all-dev
-
-
-pip
-^^^
-
-Use pip to install the required python packages::
-
- $ apt-get install python-pip python-dev libgnutls-dev
- $ pip install -r pkg/requirements.pip
-
-
-Installing
------------
-
-After getting the source and installing all the dependencies, proceed to install the ``leap-client`` package::
-
- $ python setup.py install
-
-
-Running
--------
-
-After a successful installation, there should be a launcher called ``leap-client`` somewhere in your path::
-
- $ leap-client
-
-
-Hacking
-=======
-
-See the `hackers guide <http://leap-client.readthedocs.org/en/latest/dev/environment.html>`_.
-
-The LEAP client git repository is available at::
-
- git://leap.se/leap_client
-
-Some steps need to be run when setting up a development environment for the first time.
-
-Enable a **virtualenv** to isolate your libraries. (The current *.gitignore* knows about a virtualenv in the root tree. If you do not like that place, just change ``.`` to *<path.to.environment>*)::
-
- $ virtualenv .
- $ source bin/activate
-
-Make sure you are in the development branch::
-
- (leap_client)$ git checkout develop
-
-Symlink your global pyqt libraries::
-
- (leap_client)$ pkg/postmkvenv.sh
-
-And make your working tree available to your pythonpath::
-
- (leap_client)$ python setup.py develop
-
-
-Testing
-=======
-
-Have a look at ``pkg/test-requirements.pip`` for the tests dependencies.
-
-To run the test suite::
-
- $ ./run_tests.sh
-
-which, the first time it is run, should automagically install all the needed dependencies in your virtualenv for you.
-
-License
-=======
-
-.. image:: https://raw.github.com/leapcode/leap_client/develop/docs/user/gpl.png
-
-The LEAP Client is released under the terms of the `GNU GPL version 3`_ or later.
-
-.. _`GNU GPL version 3`: http://www.gnu.org/licenses/gpl.txt
diff --git a/data/images/Dialog-accept.png b/data/images/Dialog-accept.png
deleted file mode 100644
index 5a8a0bdb..00000000
--- a/data/images/Dialog-accept.png
+++ /dev/null
Binary files differ
diff --git a/data/images/Dialog-error.png b/data/images/Dialog-error.png
deleted file mode 100644
index 51da2f5b..00000000
--- a/data/images/Dialog-error.png
+++ /dev/null
Binary files differ
diff --git a/data/images/Emblem-question.png b/data/images/Emblem-question.png
deleted file mode 100644
index b2163e5b..00000000
--- a/data/images/Emblem-question.png
+++ /dev/null
Binary files differ
diff --git a/data/images/conn_connected.png b/data/images/conn_connected.png
deleted file mode 100644
index a5d20497..00000000
--- a/data/images/conn_connected.png
+++ /dev/null
Binary files differ
diff --git a/data/images/conn_connecting.png b/data/images/conn_connecting.png
deleted file mode 100644
index 31b6e617..00000000
--- a/data/images/conn_connecting.png
+++ /dev/null
Binary files differ
diff --git a/data/images/conn_error.png b/data/images/conn_error.png
deleted file mode 100644
index 85669af6..00000000
--- a/data/images/conn_error.png
+++ /dev/null
Binary files differ
diff --git a/data/images/favicon.ico b/data/images/favicon.ico
deleted file mode 100644
index b5f3505a..00000000
--- a/data/images/favicon.ico
+++ /dev/null
Binary files differ
diff --git a/data/images/leap-client.icns b/data/images/leap-client.icns
deleted file mode 100644
index d5d52cdc..00000000
--- a/data/images/leap-client.icns
+++ /dev/null
Binary files differ
diff --git a/data/images/leap-color-small.png b/data/images/leap-color-small.png
deleted file mode 100644
index bc9d4e7f..00000000
--- a/data/images/leap-color-small.png
+++ /dev/null
Binary files differ
diff --git a/data/leap_client.pro b/data/leap_client.pro
deleted file mode 100644
index 57764a23..00000000
--- a/data/leap_client.pro
+++ /dev/null
@@ -1,25 +0,0 @@
-# qmake file
-
-# is not there a f*** way of expanding this? other to template with python I mean...
-
-SOURCES += ../src/leap/base/exceptions.py
-SOURCES += ../src/leap/eip/exceptions.py
-SOURCES += ../src/leap/baseapp/eip.py
-SOURCES += ../src/leap/baseapp/log.py
-SOURCES += ../src/leap/baseapp/systray.py
-SOURCES += ../src/leap/gui/firstrun/intro.py
-SOURCES += ../src/leap/gui/firstrun/last.py
-SOURCES += ../src/leap/gui/firstrun/login.py
-SOURCES += ../src/leap/gui/firstrun/providerinfo.py
-SOURCES += ../src/leap/gui/firstrun/providerselect.py
-SOURCES += ../src/leap/gui/firstrun/providersetup.py
-SOURCES += ../src/leap/gui/firstrun/register.py
-SOURCES += ../src/leap/gui/firstrun/connect.py
-SOURCES += ../src/leap/gui/firstrun/wizard.py
-
-# where to generate ts files -- tx will pick from here
-
-# original file, english
-
-TRANSLATIONS += ts/en_US.ts
-
diff --git a/data/mkpyqt.py b/data/mkpyqt.py
deleted file mode 100755
index 1ce2cd28..00000000
--- a/data/mkpyqt.py
+++ /dev/null
@@ -1,271 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2007-10 Qtrac Ltd. All rights reserved.
-# This program or module is free software: you can redistribute it and/or
-# modify it under the terms of the GNU General Public License as published
-# by the Free Software Foundation, either version 2 of the License, or
-# version 3 of the License, or (at your option) any later version. It is
-# provided for educational purposes and is distributed in the hope that
-# it will be useful, but WITHOUT ANY WARRANTY; without even the implied
-# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See
-# the GNU General Public License for more details.
-
-import os
-import platform
-import stat
-import subprocess
-import sys
-import PyQt4.QtCore
-
-__version__ = "1.0.4"
-
-Windows = sys.platform.lower().startswith(("win", "microsoft"))
-if Windows:
- PATH = os.path.join(os.path.dirname(sys.executable),
- "Lib/site-packages/PyQt4")
- if os.access(os.path.join(PATH, "bin"), os.R_OK):
- PATH = os.path.join(PATH, "bin")
-else:
- app = PyQt4.QtCore.QCoreApplication([])
- PATH = unicode(app.applicationDirPath())
- del app
-if sys.platform.startswith("darwin"):
- i = PATH.find("Resources")
- if i > -1:
- PATH = PATH[:i] + "bin"
-PYUIC4 = os.path.join(PATH, "pyuic4") # e.g. PYUIC4 = "/usr/bin/pyuic4"
-if sys.platform.startswith("darwin"):
- PYUIC4 = os.path.dirname(sys.executable)
- i = PYUIC4.find("Resources")
- if i > -1:
- PYUIC4 = PYUIC4[:i] + "Lib/python2.5/site-packages/PyQt4/uic/pyuic.py"
-PYRCC4 = os.path.join(PATH, "pyrcc4")
-PYLUPDATE4 = os.path.join(PATH, "pylupdate4")
-LRELEASE = "lrelease"
-if Windows:
- PYUIC4 = PYUIC4.replace("/", "\\") + ".bat"
- PYRCC4 = PYRCC4.replace("/", "\\") + ".exe"
- PYLUPDATE4 = PYLUPDATE4.replace("/", "\\") + ".exe"
-
-msg = []
-shell = lambda command: subprocess.Popen(['which', command],
- stdout=subprocess.PIPE).communicate()
-
-if not os.access(PYUIC4, os.F_OK):
- PYUIC4 = shell('pyuic4')[0].strip('\n')
- if not os.access(PYUIC4, os.F_OK):
- msg.append("failed to find pyuic4; tried %s" % PYUIC4)
-
-if not os.access(PYRCC4, os.F_OK):
- PYRCC4 = shell('pyrcc4')[0].strip('\n')
- if not os.access(PYRCC4, os.F_OK):
- msg.append("failed to find pyrcc4; tried %s" % PYRCC4)
-
-if not os.access(PYLUPDATE4, os.F_OK):
- PYLUPDATE4 = shell('pylupdate4')[0].strip('\n')
- if not os.access(PYLUPDATE4, os.F_OK):
- msg.append("failed to find pylupdate4; tried %s" % PYLUPDATE4)
-
-if msg:
- print "\n".join(msg)
- print "try manually editing this program to put the correct " + \
- "paths in place"
- sys.exit()
-
-Debug = False
-Verbose = False
-
-def usage():
- print """usage: mkpyqt.py [options] [path]
-
-Options (which can be given in any of the forms shown):
--b --build build [default]
--c --clean clean
--f --force force
--t --translate translate
--r --recurse recurse
--v --verbose verbose
--D --debug debug
-path defaults to .
-
-If executed with no arguments (or with a build argument) it does a
-build, i.e., it looks for all *.ui and *.qrc files and makes sure that
-the corresponding ui_*.py and qrc_*.py files exist and are up-to-date.
-
-If executed with clean, deletes all ui_*.py and qrc_*.py files that have
-corresponding *.ui and *.qrc files, and all *.pyc and *.pyo files.
-
-If executed with force, it does a clean followed by a build.
-
-If building and the translate option is given, after building, it runs
-pylupdate4 on all .py and .pyw files it encounters, and then runs lrelease
-on all .ts files it encounters. It does not use a .pro file so the .ts
-files must be created in the first place, e.g., using pylupdate4 on one
-of the source files and using its -ts option.
-
-WARNING: Do not give any hand-coded files names that match ui_*.py or
-qrc_*.py since these will be deleted by mkpyqt.py clean!
-
-NOTE: If any tool fails to run, e.g., pyuic4, then edit this program and
-hard-code the path; the variables with the tool paths are near the top
-of the file.
-
-mkpyqt.py v %s. Copyright (c) 2007-9 Qtrac Ltd. All rights reserved.
-""" % __version__
- sys.exit()
-
-
-def report_failure(command, args, process):
- msg = ""
- ba = process.readAllStandardError()
- if not ba.isEmpty():
- msg = ": " + str(QString(ba))
- print "failed", command, " ".join(args), msg
-
-
-def build(path):
- for name in os.listdir(path):
- source = os.path.join(path, name)
- target = None
- if source.endswith(".ui"):
- target = os.path.join(path,
- "ui_" + name.replace(".ui", ".py"))
- command = PYUIC4
- elif source.endswith(".qrc"):
- target = os.path.join(path,
- "qrc_" + name.replace(".qrc", ".py"))
- command = PYRCC4
- process = PyQt4.QtCore.QProcess()
- if target is not None:
- if not os.access(target, os.F_OK) or (
- os.stat(source)[stat.ST_MTIME] > \
- os.stat(target)[stat.ST_MTIME]):
- args = ["-o", target, source]
- if sys.platform.startswith("darwin") and command == PYUIC4:
- command = sys.executable
- args = [PYUIC4] + args
- if Debug:
- print "# %s %s" % (command, " ".join(args))
- else:
- process.start(command, args)
- if not process.waitForFinished(2 * 60 * 1000):
- report_failure(command, args, process)
- else:
- print source, "->", target
- elif Verbose:
- print source, "is up-to-date"
-
-
-def clean(path):
- deletelist = []
- for name in os.listdir(path):
- target = os.path.join(path, name)
- source = None
- if target.endswith(".py") or target.endswith(".pyc") or \
- target.endswith(".pyo"):
- if name.startswith("ui_") and not name[-1] in "oc":
- source = os.path.join(path, name[3:-3] + ".ui")
- elif name.startswith("qrc_"):
- if target[-1] in "oc":
- source = os.path.join(path, name[4:-4] + ".qrc")
- else:
- source = os.path.join(path, name[4:-3] + ".qrc")
- elif target[-1] in "oc":
- source = target[:-1]
- if source is not None:
- if os.access(source, os.F_OK):
- if Debug:
- print "# delete ", target
- else:
- deletelist.append(target)
- else:
- print "will not remove '%s' since `%s' not found" % (
- target, source)
- if not Debug:
- for target in deletelist:
- if Verbose:
- print "deleted", target
- os.remove(target)
-
-
-def translate(path):
- files = []
- tsfiles = []
- for name in os.listdir(path):
- if name.endswith((".py", ".pyw")):
- files.append(os.path.join(path, name))
- elif name.endswith(".ts"):
- tsfiles.append(os.path.join(path, name))
- if not tsfiles:
- return
- verbose = "-verbose" if Verbose else ""
- silent = "-silent" if not Verbose else ""
- process = PyQt4.QtCore.QProcess()
- for ts in tsfiles:
- qm = ts[:-3] + ".qm"
- command1 = PYLUPDATE4
- args1 = [verbose] + files + ["-ts", ts]
- command2 = LRELEASE
- args2 = [silent, ts, "-qm", qm]
- if Debug:
- print "updated", ts
- print "generated", qm
- else:
- process.start(command1, args1)
- if not process.waitForFinished(2 * 60 * 1000):
- report_failure(command1, args1, process)
- process.start(command2, args2)
- if not process.waitForFinished(2 * 60 * 1000):
- report_failure(command2, args2, process)
-
-
-def apply(recurse, function, path):
- if not recurse:
- function(path)
- else:
- for root, dirs, files in os.walk(path):
- for dir in dirs:
- function(os.path.join(root, dir))
-
-
-def main():
- global Debug, Verbose
- function = build
- recurse = False
- trans = False
- force = False
- path = "."
- args = sys.argv[1:]
- while args:
- arg = args.pop(0)
- if arg in ("-D", "--debug", "debug"):
- Debug = True
- elif arg in ("-b", "--build", "build"):
- pass # This is the default
- elif arg in ("-c", "--clean", "clean"):
- function = clean
- elif arg in ("-f", "--force", "force"):
- force = True
- elif arg in ("-t", "--translate", "translate"):
- trans = True
- elif arg in ("-r", "--recurse", "recurse"):
- recurse = True
- elif arg in ("-v", "--verbose", "verbose"):
- Verbose = True
- elif arg in ("-h", "--help", "help"):
- usage()
- else:
- path = arg
- if not force:
- apply(recurse, function, path)
- else:
- apply(recurse, clean, path)
- apply(recurse, build, path)
- if trans and (function == build or force):
- apply(recurse, translate, path)
-
-main()
-
-# 1.0.1 Fixed bug reported by Brian Downing where paths that contained
-# spaces were not handled correctly.
-# 1.0.2 Changed default path on Windows to match PyQt 4.4
-# 1.0.3 Tried to make the paths work on Mac OS X
diff --git a/data/resources/locale.qrc b/data/resources/locale.qrc
deleted file mode 100644
index 47fb5243..00000000
--- a/data/resources/locale.qrc
+++ /dev/null
@@ -1,6 +0,0 @@
-<!DOCTYPE RCC><RCC version="1.0">
-<qresource>
-<file>../translations/es.qm</file>
-<file>../translations/de.qm</file>
-</qresource>
-</RCC>
diff --git a/data/resources/mainwindow.qrc b/data/resources/mainwindow.qrc
deleted file mode 100644
index 58f59c9d..00000000
--- a/data/resources/mainwindow.qrc
+++ /dev/null
@@ -1,11 +0,0 @@
-<!DOCTYPE RCC><RCC version="1.0">
-<qresource prefix="/">
- <file>../images/conn_error.png</file>
- <file>../images/conn_connecting.png</file>
- <file>../images/conn_connected.png</file>
- <file>../images/leap-color-small.png</file>
- <file>../images/Dialog-accept.png</file>
- <file>../images/Dialog-error.png</file>
- <file>../images/Emblem-question.png</file>
-</qresource>
-</RCC>
diff --git a/data/translations/README.rst b/data/translations/README.rst
deleted file mode 100644
index 1f3dd0b3..00000000
--- a/data/translations/README.rst
+++ /dev/null
@@ -1,8 +0,0 @@
-data/translations
-=================
-
-We expect finished translations (i.e., those downloaded from ``transifex``) to live here.
-
-The Translator object will pick them up from here.
-
-(Actually, from the embedded locale_rc)
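
A minimal sketch of how these embedded translations might be installed at startup, assuming the compiled ``locale_rc`` module (``src/leap/gui/locale_rc.py`` in the file list above); the resource path passed to ``load()`` is an assumption, since it depends on the prefixes and paths recorded in ``locale.qrc``::

    # Hypothetical sketch: install one of the embedded .qm translations.
    import sys

    from PyQt4 import QtCore, QtGui

    from leap.gui import locale_rc  # noqa: importing registers the embedded .qm files

    app = QtGui.QApplication(sys.argv)
    translator = QtCore.QTranslator()
    # ":/translations" is an illustrative resource prefix only.
    if translator.load("es.qm", ":/translations"):
        app.installTranslator(translator)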
diff --git a/data/translations/de.qm b/data/translations/de.qm
deleted file mode 100644
index b2f04f22..00000000
--- a/data/translations/de.qm
+++ /dev/null
Binary files differ
diff --git a/data/translations/de.ts b/data/translations/de.ts
deleted file mode 100644
index f2ab6fa4..00000000
--- a/data/translations/de.ts
+++ /dev/null
@@ -1,218 +0,0 @@
-<?xml version="1.0" ?><!DOCTYPE TS><TS language="de" version="2.0">
-<context>
- <name>IntroPage</name>
- <message>
- <location filename="../src/leap/gui/firstrun/intro.py" line="14"/>
- <source>First run wizard.</source>
- <translation>Assistent für erstmaligen Start</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/intro.py" line="24"/>
- <source>Now we will guide you through some configuration that is needed before you can connect for the first time.&lt;br&gt;&lt;br&gt;If you ever need to modify these options again, you can find the wizard in the &apos;&lt;i&gt;Settings&lt;/i&gt;&apos; menu from the main window.&lt;br&gt;&lt;br&gt;Do you want to &lt;b&gt;sign up&lt;/b&gt; for a new account, or &lt;b&gt;log in&lt;/b&gt; with an already existing username?&lt;br&gt;</source>
- <translation>Wir werden dich nun durch einige Konfigurationen führen, die du für den ersten Start benötigst.&lt;br&gt;&lt;br&gt;Wenn du diese Konfigurationen jemals ändern musst, findest du den Assistenten im &apos;&lt;i&gt;Einstellungen&lt;/i&gt;&apos;-Menü des Haupfensters.&lt;br&gt;&lt;br&gt;Möchtest du dich für einen neuen Account &lt;b&gt;anmelden&lt;/b&gt; oder mit einem bestehenden Usernamen &lt;b&gt;einloggen&lt;/b&gt;?</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/intro.py" line="37"/>
- <source>Sign up for a new account.</source>
- <translation>Für einen neuen Account anmelden.</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/intro.py" line="40"/>
- <source>Log In with my credentials.</source>
- <translation>Mit bestehenden Daten einloggen.</translation>
- </message>
-</context>
-<context>
- <name>LogInPage</name>
- <message>
- <location filename="../src/leap/gui/firstrun/login.py" line="96"/>
- <source>Username must be in the username@provider form.</source>
- <translation>Der Username muss in der Form username@provider sein.</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/login.py" line="140"/>
- <source>Resolving domain name</source>
- <translation>Auflösen des Domain-Namens</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/login.py" line="163"/>
- <source>Authentication error: %s</source>
- <translation>Authentifizierungsfehler: %s</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/login.py" line="179"/>
- <source>Credentials validated.</source>
- <translation>Anmeldedaten korrekt.</translation>
- </message>
-</context>
-<context>
- <name>ProviderInfoPage</name>
- <message>
- <location filename="../src/leap/gui/firstrun/providerinfo.py" line="18"/>
- <source>Provider Info</source>
- <translation>Provider-info</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providerinfo.py" line="19"/>
- <source>This is what provider says.</source>
- <translation>Das ist, was der Provider sagt.</translation>
- </message>
-</context>
-<context>
- <name>ProviderSetupValidationPage</name>
- <message>
- <location filename="../src/leap/gui/firstrun/providersetup.py" line="26"/>
- <source>Provider setup</source>
- <translation>Provider-Setup</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providersetup.py" line="27"/>
- <source>Doing autoconfig.</source>
- <translation>Führe autoconfig durch.</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providersetup.py" line="83"/>
- <source>Fetching CA certificate</source>
- <translation>Hole CA-Zertifikat</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providersetup.py" line="105"/>
- <source>Checking CA fingerprint</source>
- <translation>Überprüfe CA-Fingerprint</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providersetup.py" line="134"/>
- <source>Validating api certificate</source>
- <translation>Überprüfe API-Zertifikat</translation>
- </message>
-</context>
-<context>
- <name>RegisterUserPage</name>
- <message>
- <location filename="../src/leap/gui/firstrun/register.py" line="31"/>
- <source>Sign Up</source>
- <translation>Anmelden</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/register.py" line="208"/>
- <source>Registration succeeded!</source>
- <translation>Registrierung erfolgreich!</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/register.py" line="238"/>
- <source>Password does not match..</source>
- <translation>Passwort stimmt nicht überien.</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/register.py" line="242"/>
- <source>Password too short.</source>
- <translation>Passwort zu kurz</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/register.py" line="247"/>
- <source>Password too obvious.</source>
- <translation>Passwort zu simpel.</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/register.py" line="279"/>
- <source>Error connecting to provider (timeout)</source>
- <translation>Verbindungsfehler zu Provider (timeout)</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/register.py" line="284"/>
- <source>Error Connecting to provider (connerr).</source>
- <translation>Verbindungsfehler zu Provider (connerr)</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/register.py" line="292"/>
- <source>Error during registration (%s)</source>
- <translation>Fehler während der Registrierung (%s)</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/register.py" line="302"/>
- <source>Could not register (bad response)</source>
- <translation>Konnte nicht registrieren (bad response)</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/register.py" line="309"/>
- <source>Username not available.</source>
- <translation>Username nicht verfügbar.</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/register.py" line="375"/>
- <source>Register a new user with provider %s.</source>
- <translation>Registriere einen neuen User bei Provider %s</translation>
- </message>
-</context>
-<context>
- <name>RegisterUserValidationPage</name>
- <message>
- <location filename="../src/leap/gui/firstrun/regvalidation.py" line="95"/>
- <source>Fetching provider config...</source>
- <translation>Hole Provider-Konfiguration...</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/regvalidation.py" line="112"/>
- <source>Authentication error: %s</source>
- <translation>Authentifizierungsfehler: %s</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/regvalidation.py" line="117"/>
- <source>Fetching eip certificate</source>
- <translation>Hole EIP-Zertifikat</translation>
- </message>
-</context>
-<context>
- <name>SelectProviderPage</name>
- <message>
- <location filename="../src/leap/gui/firstrun/providerselect.py" line="32"/>
- <source>Enter Provider</source>
- <translation>Gib Provider ein</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providerselect.py" line="33"/>
- <source>Please enter the domain of the provider you want to use for your connection.</source>
- <translation>Bitte gib die Domain des Providers an, den du für deine Verbindung nutzen möchtest.</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providerselect.py" line="87"/>
- <source>chec&amp;k!</source>
- <translation type="unfinished"/>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providerselect.py" line="97"/>
- <source>Server certificate could not be verified.</source>
- <translation>Server-Zertifikat konnte nicht bestätigt werden.</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providerselect.py" line="136"/>
- <source>Certificate validation</source>
- <translation>Zertifikatsüberprüfung</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providerselect.py" line="222"/>
- <source>checking domain name</source>
- <translation>Prüfe Domain-Name</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providerselect.py" line="276"/>
- <source>checking https connection</source>
- <translation>Prüfe HTTPS-Verbindung</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providerselect.py" line="292"/>
- <source>Could not get info from provider.</source>
- <translation>Konnte keine Information vom Provider bekommen.</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providerselect.py" line="295"/>
- <source>Could not download provider info (refused conn.).</source>
- <translation>Konnte Provider-Info nicht herunterladen (refused conn.).</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providerselect.py" line="305"/>
- <source>fetching provider info</source>
- <translation>Hole Provider-Info</translation>
- </message>
-</context>
-</TS>
\ No newline at end of file
diff --git a/data/translations/es.qm b/data/translations/es.qm
deleted file mode 100644
index 8daa2037..00000000
--- a/data/translations/es.qm
+++ /dev/null
Binary files differ
diff --git a/data/translations/es.ts b/data/translations/es.ts
deleted file mode 100644
index 84aa6f0a..00000000
--- a/data/translations/es.ts
+++ /dev/null
@@ -1,218 +0,0 @@
-<?xml version="1.0" ?><!DOCTYPE TS><TS language="es" version="2.0">
-<context>
- <name>IntroPage</name>
- <message>
- <location filename="../src/leap/gui/firstrun/intro.py" line="14"/>
- <source>First run wizard.</source>
- <translation>Primera Conexion.</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/intro.py" line="24"/>
- <source>Now we will guide you through some configuration that is needed before you can connect for the first time.&lt;br&gt;&lt;br&gt;If you ever need to modify these options again, you can find the wizard in the &apos;&lt;i&gt;Settings&lt;/i&gt;&apos; menu from the main window.&lt;br&gt;&lt;br&gt;Do you want to &lt;b&gt;sign up&lt;/b&gt; for a new account, or &lt;b&gt;log in&lt;/b&gt; with an already existing username?&lt;br&gt;</source>
- <translation>Vamos a configurar algunas cosas antes de que te puedas conectar por primera vez.&lt;br&gt;&lt;br&gt;Si necesitas modificar estas opciones de nuevo, puedes encontrar este asistente en el menu de &apos;&lt;i&gt;Opciones&lt;/i&gt;&apos; en la ventana principal.&lt;br&gt;&lt;br&gt;Quieres &lt;b&gt;registrar&lt;/b&gt; una nueva cuenta, o &lt;b&gt;loguearte&lt;/b&gt; con tu usuario?&lt;br&gt; </translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/intro.py" line="37"/>
- <source>Sign up for a new account.</source>
- <translation>Registrar una cuenta nueva.</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/intro.py" line="40"/>
- <source>Log In with my credentials.</source>
- <translation>Loguearme con un usuario que ya tengo.</translation>
- </message>
-</context>
-<context>
- <name>LogInPage</name>
- <message>
- <location filename="../src/leap/gui/firstrun/login.py" line="96"/>
- <source>Username must be in the username@provider form.</source>
- <translation>El usuario tiene que ser usuario@tu.proveedor</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/login.py" line="140"/>
- <source>Resolving domain name</source>
- <translation>Resolviendo nombre de dominio</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/login.py" line="163"/>
- <source>Authentication error: %s</source>
- <translation>Error de autenticacion: %s</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/login.py" line="179"/>
- <source>Credentials validated.</source>
- <translation>Credenciales validadas.</translation>
- </message>
-</context>
-<context>
- <name>ProviderInfoPage</name>
- <message>
- <location filename="../src/leap/gui/firstrun/providerinfo.py" line="18"/>
- <source>Provider Info</source>
- <translation>Info del Proveedor</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providerinfo.py" line="19"/>
- <source>This is what provider says.</source>
- <translation>Esto es lo que dice el proveedor.</translation>
- </message>
-</context>
-<context>
- <name>ProviderSetupValidationPage</name>
- <message>
- <location filename="../src/leap/gui/firstrun/providersetup.py" line="26"/>
- <source>Provider setup</source>
- <translation>Configuracion del Proveedor</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providersetup.py" line="27"/>
- <source>Doing autoconfig.</source>
- <translation>Autoconfigurando.</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providersetup.py" line="83"/>
- <source>Fetching CA certificate</source>
- <translation>Obteniendo certificado de la CA</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providersetup.py" line="105"/>
- <source>Checking CA fingerprint</source>
- <translation>Comprobando el fingerprint de la CA</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providersetup.py" line="134"/>
- <source>Validating api certificate</source>
- <translation>Validando certificado de la api</translation>
- </message>
-</context>
-<context>
- <name>RegisterUserPage</name>
- <message>
- <location filename="../src/leap/gui/firstrun/register.py" line="31"/>
- <source>Sign Up</source>
- <translation>Nueva Cuenta</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/register.py" line="208"/>
- <source>Registration succeeded!</source>
- <translation>Cuenta creada con exito!</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/register.py" line="238"/>
- <source>Password does not match..</source>
- <translation>Las contrasenas no son iguales..</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/register.py" line="242"/>
- <source>Password too short.</source>
- <translation>Contrasena demasiado corta.</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/register.py" line="247"/>
- <source>Password too obvious.</source>
- <translation>Contrasena demasiado obvia.</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/register.py" line="279"/>
- <source>Error connecting to provider (timeout)</source>
- <translation>Error conectandose al proveedor (timeout)</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/register.py" line="284"/>
- <source>Error Connecting to provider (connerr).</source>
- <translation>Error conectandose al proveedor (connerr).</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/register.py" line="292"/>
- <source>Error during registration (%s)</source>
- <translation>Error durante el registro (%s)</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/register.py" line="302"/>
- <source>Could not register (bad response)</source>
- <translation>No se pudo registrar (bad response)</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/register.py" line="309"/>
- <source>Username not available.</source>
- <translation>Usuario no disponible.</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/register.py" line="375"/>
- <source>Register a new user with provider %s.</source>
- <translation>Registrar un nuevo usuario con el proveedor %s.</translation>
- </message>
-</context>
-<context>
- <name>RegisterUserValidationPage</name>
- <message>
- <location filename="../src/leap/gui/firstrun/regvalidation.py" line="95"/>
- <source>Fetching provider config...</source>
- <translation>Obteniendo configuracion del proveedor...</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/regvalidation.py" line="112"/>
- <source>Authentication error: %s</source>
- <translation>Error de autenticacion: %s</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/regvalidation.py" line="117"/>
- <source>Fetching eip certificate</source>
- <translation>Obteniendo certificado eip</translation>
- </message>
-</context>
-<context>
- <name>SelectProviderPage</name>
- <message>
- <location filename="../src/leap/gui/firstrun/providerselect.py" line="32"/>
- <source>Enter Provider</source>
- <translation>Entra tu Proveedor</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providerselect.py" line="33"/>
- <source>Please enter the domain of the provider you want to use for your connection.</source>
- <translation>Por favor, rellena el dominio del proveedor que quieras usar para tu conexion.</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providerselect.py" line="87"/>
- <source>chec&amp;k!</source>
- <translation>compro&amp;bar!</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providerselect.py" line="97"/>
- <source>Server certificate could not be verified.</source>
- <translation>No se pudo verificar el certificado del servidor.</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providerselect.py" line="136"/>
- <source>Certificate validation</source>
- <translation>Validacion del certificado</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providerselect.py" line="222"/>
- <source>checking domain name</source>
- <translation>comprobando nombre de dominio</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providerselect.py" line="276"/>
- <source>checking https connection</source>
- <translation>comprobando conexion https</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providerselect.py" line="292"/>
- <source>Could not get info from provider.</source>
- <translation>no se pudo obtener info del proveedor</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providerselect.py" line="295"/>
- <source>Could not download provider info (refused conn.).</source>
- <translation>no se pudo obtener info del proveedor (refused conn.).</translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providerselect.py" line="305"/>
- <source>fetching provider info</source>
- <translation>obteniendo info del preveedor</translation>
- </message>
-</context>
-</TS>
\ No newline at end of file
diff --git a/data/ts/README.rst b/data/ts/README.rst
deleted file mode 100644
index 3db2d104..00000000
--- a/data/ts/README.rst
+++ /dev/null
@@ -1,14 +0,0 @@
-data/ts
-=======
-
-Here we expect the .ts files generated by typing::
-
- $ make translations
-
-This will generate the sources (en_US).
-
-For uploading a source::
-
- $ tx push -s
-
-The Translator should pick up finished ``.qm`` files from ``data/translations`` instead of this folder.
diff --git a/data/ts/en_US.ts b/data/ts/en_US.ts
deleted file mode 100644
index d2cba837..00000000
--- a/data/ts/en_US.ts
+++ /dev/null
@@ -1,477 +0,0 @@
-<?xml version="1.0" encoding="utf-8"?>
-<!DOCTYPE TS><TS version="2.0">
-<context>
- <name>ConnectionPage</name>
- <message>
- <location filename="../src/leap/gui/firstrun/connect.py" line="26"/>
- <source>Connecting...</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/connect.py" line="27"/>
- <source>Setting up a encrypted connection with the provider</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/connect.py" line="85"/>
- <source>Getting EIP configuration files</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/connect.py" line="101"/>
- <source>Authentication error: %s</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/connect.py" line="109"/>
- <source>Getting EIP certificate</source>
- <translation type="unfinished"></translation>
- </message>
-</context>
-<context>
- <name>EIPConductorAppMixin</name>
- <message>
- <location filename="../src/leap/baseapp/eip.py" line="221"/>
- <source>&amp;Disconnect</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/baseapp/eip.py" line="235"/>
- <source>&amp;Connect</source>
- <translation type="unfinished"></translation>
- </message>
-</context>
-<context>
- <name>EIPErrors</name>
- <message>
- <location filename="../src/leap/eip/exceptions.py" line="66"/>
- <source>We could not find any authentication agent in your system.&lt;br/&gt;Make sure you have &lt;b&gt;polkit-gnome-authentication-agent-1&lt;/b&gt; running and try again.</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/eip/exceptions.py" line="77"/>
- <source>We could not find &lt;b&gt;pkexec&lt;/b&gt; in your system.&lt;br/&gt; Do you want to try &lt;b&gt;setuid workaround&lt;/b&gt;? (&lt;i&gt;DOES NOTHING YET&lt;/i&gt;)</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/eip/exceptions.py" line="88"/>
- <source>No suitable openvpn command found. &lt;br/&gt;(Might be a permissions problem)</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/eip/exceptions.py" line="97"/>
- <source>there is a problem with provider certificate</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/eip/exceptions.py" line="104"/>
- <source>an error occurred during configuratio of leap services</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/eip/exceptions.py" line="119"/>
- <source>Server does not allow secure connections</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/eip/exceptions.py" line="126"/>
- <source>Server certificate could not be verified</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/eip/exceptions.py" line="137"/>
- <source>We could not find your eip certs in the expected path</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/eip/exceptions.py" line="111"/>
- <source>Another OpenVPN Process has been detected. Please close it before starting leap-client</source>
- <translation type="unfinished"></translation>
- </message>
-</context>
-<context>
- <name>Errors</name>
- <message>
- <location filename="../src/leap/base/exceptions.py" line="57"/>
- <source>Interface not found</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/base/exceptions.py" line="64"/>
- <source>Looks like your computer is not connected to the internet</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/base/exceptions.py" line="72"/>
- <source>Looks like there are problems with your internet connection</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/base/exceptions.py" line="80"/>
- <source>It looks like there is no internet connection.</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/base/exceptions.py" line="88"/>
- <source>Domain cannot be found</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/base/exceptions.py" line="95"/>
- <source>The Encrypted Connection was lost.</source>
- <translation type="unfinished"></translation>
- </message>
-</context>
-<context>
- <name>IntroPage</name>
- <message>
- <location filename="../src/leap/gui/firstrun/intro.py" line="14"/>
- <source>First run wizard</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/intro.py" line="37"/>
- <source>Sign up for a new account</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/intro.py" line="40"/>
- <source>Log In with my credentials</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/intro.py" line="24"/>
- <source>Now we will guide you through some configuration that is needed before you can connect for the first time.&lt;br&gt;&lt;br&gt;If you ever need to modify these options again, you can find the wizard in the &apos;&lt;i&gt;Settings&lt;/i&gt;&apos; menu from the main window.&lt;br&gt;&lt;br&gt;Do you want to &lt;b&gt;sign up&lt;/b&gt; for a new account, or &lt;b&gt;log in&lt;/b&gt; with an already existing username?&lt;br&gt;</source>
- <translation type="unfinished"></translation>
- </message>
-</context>
-<context>
- <name>LastPage</name>
- <message>
- <location filename="../src/leap/gui/firstrun/last.py" line="18"/>
- <source>Connecting to Encrypted Internet Proxy service...</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/last.py" line="66"/>
- <source>Click &apos;&lt;i&gt;%s&lt;/i&gt;&apos; to end the wizard and save your settings.</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/last.py" line="62"/>
- <source>You are now using an encrypted connection!</source>
- <translation type="unfinished"></translation>
- </message>
-</context>
-<context>
- <name>LogInPage</name>
- <message>
- <location filename="../src/leap/gui/firstrun/login.py" line="96"/>
- <source>Username must be in the username@provider form.</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/login.py" line="140"/>
- <source>Resolving domain name</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/login.py" line="163"/>
- <source>Authentication error: %s</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/login.py" line="179"/>
- <source>Credentials validated.</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/login.py" line="24"/>
- <source>Log In</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/login.py" line="38"/>
- <source>User &amp;name:</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/login.py" line="53"/>
- <source>&amp;Password:</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/login.py" line="80"/>
- <source>Log in</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/login.py" line="25"/>
- <source>Log in with your credentials</source>
- <translation type="unfinished"></translation>
- </message>
-</context>
-<context>
- <name>LogPaneMixin</name>
- <message>
- <location filename="../src/leap/baseapp/log.py" line="25"/>
- <source>&amp;Connect</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/baseapp/log.py" line="38"/>
- <source>Disconnected</source>
- <translation type="unfinished"></translation>
- </message>
-</context>
-<context>
- <name>ProviderInfoPage</name>
- <message>
- <location filename="../src/leap/gui/firstrun/providerinfo.py" line="19"/>
- <source>Provider Information</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providerinfo.py" line="20"/>
- <source>Services offered by this provider</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providerinfo.py" line="95"/>
- <source>enrollment policy</source>
- <translation type="unfinished"></translation>
- </message>
-</context>
-<context>
- <name>ProviderSetupValidationPage</name>
- <message>
- <location filename="../src/leap/gui/firstrun/providersetup.py" line="28"/>
- <source>Provider setup</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providersetup.py" line="85"/>
- <source>Fetching CA certificate</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providersetup.py" line="107"/>
- <source>Checking CA fingerprint</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providersetup.py" line="125"/>
- <source>Validating api certificate</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providersetup.py" line="29"/>
- <source>Gathering configuration options for this provider</source>
- <translation type="unfinished"></translation>
- </message>
-</context>
-<context>
- <name>RegisterUserPage</name>
- <message>
- <location filename="../src/leap/gui/firstrun/register.py" line="31"/>
- <source>Sign Up</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/register.py" line="208"/>
- <source>Registration succeeded!</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/register.py" line="244"/>
- <source>Password does not match..</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/register.py" line="248"/>
- <source>Password too short.</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/register.py" line="253"/>
- <source>Password too obvious.</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/register.py" line="282"/>
- <source>Error connecting to provider (timeout)</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/register.py" line="287"/>
- <source>Error Connecting to provider (connerr).</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/register.py" line="295"/>
- <source>Error during registration (%s)</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/register.py" line="305"/>
- <source>Could not register (bad response)</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/register.py" line="312"/>
- <source>Username not available.</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/register.py" line="48"/>
- <source>User &amp;name:</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/register.py" line="60"/>
- <source>&amp;Password:</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/register.py" line="66"/>
- <source>Password (again):</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/register.py" line="72"/>
- <source>&amp;Remember username and password.</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/register.py" line="378"/>
- <source>Register a new user with provider &lt;em&gt;%s&lt;/em&gt;</source>
- <translation type="unfinished"></translation>
- </message>
-</context>
-<context>
- <name>SelectProviderPage</name>
- <message>
- <location filename="../src/leap/gui/firstrun/providerselect.py" line="32"/>
- <source>Enter Provider</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providerselect.py" line="87"/>
- <source>chec&amp;k!</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providerselect.py" line="97"/>
- <source>Server certificate could not be verified.</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providerselect.py" line="136"/>
- <source>Certificate validation</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providerselect.py" line="290"/>
- <source>Could not get info from provider.</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providerselect.py" line="293"/>
- <source>Could not download provider info (refused conn.).</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providerselect.py" line="103"/>
- <source>&amp;Trust this provider certificate.</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providerselect.py" line="344"/>
- <source>Do you want to &lt;b&gt;trust this provider certificate?&lt;/b&gt;</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providerselect.py" line="347"/>
- <source>SHA-256 fingerprint: &lt;i&gt;%s&lt;/i&gt;&lt;br&gt;</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providerselect.py" line="33"/>
- <source>Please enter the domain of the provider you want to use for your connection</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providerselect.py" line="222"/>
- <source>Checking if it is a valid provider</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providerselect.py" line="276"/>
- <source>Checking for a secure connection</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/gui/firstrun/providerselect.py" line="303"/>
- <source>Getting info from the provider</source>
- <translation type="unfinished"></translation>
- </message>
-</context>
-<context>
- <name>StatusAwareTrayIconMixin</name>
- <message>
- <location filename="../src/leap/baseapp/systray.py" line="78"/>
- <source>EIP Connection Status</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/baseapp/systray.py" line="87"/>
- <source>&lt;b&gt;disconnected&lt;/b&gt;</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/baseapp/systray.py" line="126"/>
- <source>Encryption ON turn &amp;off</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/baseapp/systray.py" line="131"/>
- <source>&amp;Details...</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/baseapp/systray.py" line="135"/>
- <source>&amp;About</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/baseapp/systray.py" line="138"/>
- <source>About Q&amp;t</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/baseapp/systray.py" line="141"/>
- <source>&amp;Quit</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/baseapp/systray.py" line="152"/>
- <source>Encryption ON turn o&amp;ff</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/baseapp/systray.py" line="157"/>
- <source>Encryption OFF turn &amp;on</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/baseapp/systray.py" line="162"/>
- <source>connecting...</source>
- <translation type="unfinished"></translation>
- </message>
- <message>
- <location filename="../src/leap/baseapp/systray.py" line="185"/>
- <source>About</source>
- <translation type="unfinished"></translation>
- </message>
-</context>
-</TS>
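The catalogue removed above is a Qt Linguist .ts file: each <context> groups the <source> strings of one class, and entries marked type="unfinished" simply fall back to the English source text at runtime. Below is a minimal sketch, not part of the deleted tree, of how such a catalogue could be loaded once compiled to a .qm file with lrelease; PyQt4 and the "translations" directory name are assumptions made purely for illustration.

    # Illustrative sketch only, not part of the deleted tree.  Assumes the
    # .ts catalogue above was compiled to a .qm file with lrelease and that
    # the GUI uses PyQt4; "translations" is a placeholder directory name.
    import sys

    from PyQt4 import QtCore, QtGui

    app = QtGui.QApplication(sys.argv)

    translator = QtCore.QTranslator()
    locale = QtCore.QLocale.system().name()   # e.g. "es_ES"
    # QTranslator.load() returns False when no matching catalogue is found,
    # in which case the untranslated <source> strings above are shown as-is.
    if translator.load(locale, "translations"):
        app.installTranslator(translator)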
diff --git a/docs/COPYING b/docs/COPYING
deleted file mode 100644
index 94a9ed02..00000000
--- a/docs/COPYING
+++ /dev/null
@@ -1,674 +0,0 @@
- GNU GENERAL PUBLIC LICENSE
- Version 3, 29 June 2007
-
- Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
- Everyone is permitted to copy and distribute verbatim copies
- of this license document, but changing it is not allowed.
-
- Preamble
-
- The GNU General Public License is a free, copyleft license for
-software and other kinds of works.
-
- The licenses for most software and other practical works are designed
-to take away your freedom to share and change the works. By contrast,
-the GNU General Public License is intended to guarantee your freedom to
-share and change all versions of a program--to make sure it remains free
-software for all its users. We, the Free Software Foundation, use the
-GNU General Public License for most of our software; it applies also to
-any other work released this way by its authors. You can apply it to
-your programs, too.
-
- When we speak of free software, we are referring to freedom, not
-price. Our General Public Licenses are designed to make sure that you
-have the freedom to distribute copies of free software (and charge for
-them if you wish), that you receive source code or can get it if you
-want it, that you can change the software or use pieces of it in new
-free programs, and that you know you can do these things.
-
- To protect your rights, we need to prevent others from denying you
-these rights or asking you to surrender the rights. Therefore, you have
-certain responsibilities if you distribute copies of the software, or if
-you modify it: responsibilities to respect the freedom of others.
-
- For example, if you distribute copies of such a program, whether
-gratis or for a fee, you must pass on to the recipients the same
-freedoms that you received. You must make sure that they, too, receive
-or can get the source code. And you must show them these terms so they
-know their rights.
-
- Developers that use the GNU GPL protect your rights with two steps:
-(1) assert copyright on the software, and (2) offer you this License
-giving you legal permission to copy, distribute and/or modify it.
-
- For the developers' and authors' protection, the GPL clearly explains
-that there is no warranty for this free software. For both users' and
-authors' sake, the GPL requires that modified versions be marked as
-changed, so that their problems will not be attributed erroneously to
-authors of previous versions.
-
- Some devices are designed to deny users access to install or run
-modified versions of the software inside them, although the manufacturer
-can do so. This is fundamentally incompatible with the aim of
-protecting users' freedom to change the software. The systematic
-pattern of such abuse occurs in the area of products for individuals to
-use, which is precisely where it is most unacceptable. Therefore, we
-have designed this version of the GPL to prohibit the practice for those
-products. If such problems arise substantially in other domains, we
-stand ready to extend this provision to those domains in future versions
-of the GPL, as needed to protect the freedom of users.
-
- Finally, every program is threatened constantly by software patents.
-States should not allow patents to restrict development and use of
-software on general-purpose computers, but in those that do, we wish to
-avoid the special danger that patents applied to a free program could
-make it effectively proprietary. To prevent this, the GPL assures that
-patents cannot be used to render the program non-free.
-
- The precise terms and conditions for copying, distribution and
-modification follow.
-
- TERMS AND CONDITIONS
-
- 0. Definitions.
-
- "This License" refers to version 3 of the GNU General Public License.
-
- "Copyright" also means copyright-like laws that apply to other kinds of
-works, such as semiconductor masks.
-
- "The Program" refers to any copyrightable work licensed under this
-License. Each licensee is addressed as "you". "Licensees" and
-"recipients" may be individuals or organizations.
-
- To "modify" a work means to copy from or adapt all or part of the work
-in a fashion requiring copyright permission, other than the making of an
-exact copy. The resulting work is called a "modified version" of the
-earlier work or a work "based on" the earlier work.
-
- A "covered work" means either the unmodified Program or a work based
-on the Program.
-
- To "propagate" a work means to do anything with it that, without
-permission, would make you directly or secondarily liable for
-infringement under applicable copyright law, except executing it on a
-computer or modifying a private copy. Propagation includes copying,
-distribution (with or without modification), making available to the
-public, and in some countries other activities as well.
-
- To "convey" a work means any kind of propagation that enables other
-parties to make or receive copies. Mere interaction with a user through
-a computer network, with no transfer of a copy, is not conveying.
-
- An interactive user interface displays "Appropriate Legal Notices"
-to the extent that it includes a convenient and prominently visible
-feature that (1) displays an appropriate copyright notice, and (2)
-tells the user that there is no warranty for the work (except to the
-extent that warranties are provided), that licensees may convey the
-work under this License, and how to view a copy of this License. If
-the interface presents a list of user commands or options, such as a
-menu, a prominent item in the list meets this criterion.
-
- 1. Source Code.
-
- The "source code" for a work means the preferred form of the work
-for making modifications to it. "Object code" means any non-source
-form of a work.
-
- A "Standard Interface" means an interface that either is an official
-standard defined by a recognized standards body, or, in the case of
-interfaces specified for a particular programming language, one that
-is widely used among developers working in that language.
-
- The "System Libraries" of an executable work include anything, other
-than the work as a whole, that (a) is included in the normal form of
-packaging a Major Component, but which is not part of that Major
-Component, and (b) serves only to enable use of the work with that
-Major Component, or to implement a Standard Interface for which an
-implementation is available to the public in source code form. A
-"Major Component", in this context, means a major essential component
-(kernel, window system, and so on) of the specific operating system
-(if any) on which the executable work runs, or a compiler used to
-produce the work, or an object code interpreter used to run it.
-
- The "Corresponding Source" for a work in object code form means all
-the source code needed to generate, install, and (for an executable
-work) run the object code and to modify the work, including scripts to
-control those activities. However, it does not include the work's
-System Libraries, or general-purpose tools or generally available free
-programs which are used unmodified in performing those activities but
-which are not part of the work. For example, Corresponding Source
-includes interface definition files associated with source files for
-the work, and the source code for shared libraries and dynamically
-linked subprograms that the work is specifically designed to require,
-such as by intimate data communication or control flow between those
-subprograms and other parts of the work.
-
- The Corresponding Source need not include anything that users
-can regenerate automatically from other parts of the Corresponding
-Source.
-
- The Corresponding Source for a work in source code form is that
-same work.
-
- 2. Basic Permissions.
-
- All rights granted under this License are granted for the term of
-copyright on the Program, and are irrevocable provided the stated
-conditions are met. This License explicitly affirms your unlimited
-permission to run the unmodified Program. The output from running a
-covered work is covered by this License only if the output, given its
-content, constitutes a covered work. This License acknowledges your
-rights of fair use or other equivalent, as provided by copyright law.
-
- You may make, run and propagate covered works that you do not
-convey, without conditions so long as your license otherwise remains
-in force. You may convey covered works to others for the sole purpose
-of having them make modifications exclusively for you, or provide you
-with facilities for running those works, provided that you comply with
-the terms of this License in conveying all material for which you do
-not control copyright. Those thus making or running the covered works
-for you must do so exclusively on your behalf, under your direction
-and control, on terms that prohibit them from making any copies of
-your copyrighted material outside their relationship with you.
-
- Conveying under any other circumstances is permitted solely under
-the conditions stated below. Sublicensing is not allowed; section 10
-makes it unnecessary.
-
- 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
-
- No covered work shall be deemed part of an effective technological
-measure under any applicable law fulfilling obligations under article
-11 of the WIPO copyright treaty adopted on 20 December 1996, or
-similar laws prohibiting or restricting circumvention of such
-measures.
-
- When you convey a covered work, you waive any legal power to forbid
-circumvention of technological measures to the extent such circumvention
-is effected by exercising rights under this License with respect to
-the covered work, and you disclaim any intention to limit operation or
-modification of the work as a means of enforcing, against the work's
-users, your or third parties' legal rights to forbid circumvention of
-technological measures.
-
- 4. Conveying Verbatim Copies.
-
- You may convey verbatim copies of the Program's source code as you
-receive it, in any medium, provided that you conspicuously and
-appropriately publish on each copy an appropriate copyright notice;
-keep intact all notices stating that this License and any
-non-permissive terms added in accord with section 7 apply to the code;
-keep intact all notices of the absence of any warranty; and give all
-recipients a copy of this License along with the Program.
-
- You may charge any price or no price for each copy that you convey,
-and you may offer support or warranty protection for a fee.
-
- 5. Conveying Modified Source Versions.
-
- You may convey a work based on the Program, or the modifications to
-produce it from the Program, in the form of source code under the
-terms of section 4, provided that you also meet all of these conditions:
-
- a) The work must carry prominent notices stating that you modified
- it, and giving a relevant date.
-
- b) The work must carry prominent notices stating that it is
- released under this License and any conditions added under section
- 7. This requirement modifies the requirement in section 4 to
- "keep intact all notices".
-
- c) You must license the entire work, as a whole, under this
- License to anyone who comes into possession of a copy. This
- License will therefore apply, along with any applicable section 7
- additional terms, to the whole of the work, and all its parts,
- regardless of how they are packaged. This License gives no
- permission to license the work in any other way, but it does not
- invalidate such permission if you have separately received it.
-
- d) If the work has interactive user interfaces, each must display
- Appropriate Legal Notices; however, if the Program has interactive
- interfaces that do not display Appropriate Legal Notices, your
- work need not make them do so.
-
- A compilation of a covered work with other separate and independent
-works, which are not by their nature extensions of the covered work,
-and which are not combined with it such as to form a larger program,
-in or on a volume of a storage or distribution medium, is called an
-"aggregate" if the compilation and its resulting copyright are not
-used to limit the access or legal rights of the compilation's users
-beyond what the individual works permit. Inclusion of a covered work
-in an aggregate does not cause this License to apply to the other
-parts of the aggregate.
-
- 6. Conveying Non-Source Forms.
-
- You may convey a covered work in object code form under the terms
-of sections 4 and 5, provided that you also convey the
-machine-readable Corresponding Source under the terms of this License,
-in one of these ways:
-
- a) Convey the object code in, or embodied in, a physical product
- (including a physical distribution medium), accompanied by the
- Corresponding Source fixed on a durable physical medium
- customarily used for software interchange.
-
- b) Convey the object code in, or embodied in, a physical product
- (including a physical distribution medium), accompanied by a
- written offer, valid for at least three years and valid for as
- long as you offer spare parts or customer support for that product
- model, to give anyone who possesses the object code either (1) a
- copy of the Corresponding Source for all the software in the
- product that is covered by this License, on a durable physical
- medium customarily used for software interchange, for a price no
- more than your reasonable cost of physically performing this
- conveying of source, or (2) access to copy the
- Corresponding Source from a network server at no charge.
-
- c) Convey individual copies of the object code with a copy of the
- written offer to provide the Corresponding Source. This
- alternative is allowed only occasionally and noncommercially, and
- only if you received the object code with such an offer, in accord
- with subsection 6b.
-
- d) Convey the object code by offering access from a designated
- place (gratis or for a charge), and offer equivalent access to the
- Corresponding Source in the same way through the same place at no
- further charge. You need not require recipients to copy the
- Corresponding Source along with the object code. If the place to
- copy the object code is a network server, the Corresponding Source
- may be on a different server (operated by you or a third party)
- that supports equivalent copying facilities, provided you maintain
- clear directions next to the object code saying where to find the
- Corresponding Source. Regardless of what server hosts the
- Corresponding Source, you remain obligated to ensure that it is
- available for as long as needed to satisfy these requirements.
-
- e) Convey the object code using peer-to-peer transmission, provided
- you inform other peers where the object code and Corresponding
- Source of the work are being offered to the general public at no
- charge under subsection 6d.
-
- A separable portion of the object code, whose source code is excluded
-from the Corresponding Source as a System Library, need not be
-included in conveying the object code work.
-
- A "User Product" is either (1) a "consumer product", which means any
-tangible personal property which is normally used for personal, family,
-or household purposes, or (2) anything designed or sold for incorporation
-into a dwelling. In determining whether a product is a consumer product,
-doubtful cases shall be resolved in favor of coverage. For a particular
-product received by a particular user, "normally used" refers to a
-typical or common use of that class of product, regardless of the status
-of the particular user or of the way in which the particular user
-actually uses, or expects or is expected to use, the product. A product
-is a consumer product regardless of whether the product has substantial
-commercial, industrial or non-consumer uses, unless such uses represent
-the only significant mode of use of the product.
-
- "Installation Information" for a User Product means any methods,
-procedures, authorization keys, or other information required to install
-and execute modified versions of a covered work in that User Product from
-a modified version of its Corresponding Source. The information must
-suffice to ensure that the continued functioning of the modified object
-code is in no case prevented or interfered with solely because
-modification has been made.
-
- If you convey an object code work under this section in, or with, or
-specifically for use in, a User Product, and the conveying occurs as
-part of a transaction in which the right of possession and use of the
-User Product is transferred to the recipient in perpetuity or for a
-fixed term (regardless of how the transaction is characterized), the
-Corresponding Source conveyed under this section must be accompanied
-by the Installation Information. But this requirement does not apply
-if neither you nor any third party retains the ability to install
-modified object code on the User Product (for example, the work has
-been installed in ROM).
-
- The requirement to provide Installation Information does not include a
-requirement to continue to provide support service, warranty, or updates
-for a work that has been modified or installed by the recipient, or for
-the User Product in which it has been modified or installed. Access to a
-network may be denied when the modification itself materially and
-adversely affects the operation of the network or violates the rules and
-protocols for communication across the network.
-
- Corresponding Source conveyed, and Installation Information provided,
-in accord with this section must be in a format that is publicly
-documented (and with an implementation available to the public in
-source code form), and must require no special password or key for
-unpacking, reading or copying.
-
- 7. Additional Terms.
-
- "Additional permissions" are terms that supplement the terms of this
-License by making exceptions from one or more of its conditions.
-Additional permissions that are applicable to the entire Program shall
-be treated as though they were included in this License, to the extent
-that they are valid under applicable law. If additional permissions
-apply only to part of the Program, that part may be used separately
-under those permissions, but the entire Program remains governed by
-this License without regard to the additional permissions.
-
- When you convey a copy of a covered work, you may at your option
-remove any additional permissions from that copy, or from any part of
-it. (Additional permissions may be written to require their own
-removal in certain cases when you modify the work.) You may place
-additional permissions on material, added by you to a covered work,
-for which you have or can give appropriate copyright permission.
-
- Notwithstanding any other provision of this License, for material you
-add to a covered work, you may (if authorized by the copyright holders of
-that material) supplement the terms of this License with terms:
-
- a) Disclaiming warranty or limiting liability differently from the
- terms of sections 15 and 16 of this License; or
-
- b) Requiring preservation of specified reasonable legal notices or
- author attributions in that material or in the Appropriate Legal
- Notices displayed by works containing it; or
-
- c) Prohibiting misrepresentation of the origin of that material, or
- requiring that modified versions of such material be marked in
- reasonable ways as different from the original version; or
-
- d) Limiting the use for publicity purposes of names of licensors or
- authors of the material; or
-
- e) Declining to grant rights under trademark law for use of some
- trade names, trademarks, or service marks; or
-
- f) Requiring indemnification of licensors and authors of that
- material by anyone who conveys the material (or modified versions of
- it) with contractual assumptions of liability to the recipient, for
- any liability that these contractual assumptions directly impose on
- those licensors and authors.
-
- All other non-permissive additional terms are considered "further
-restrictions" within the meaning of section 10. If the Program as you
-received it, or any part of it, contains a notice stating that it is
-governed by this License along with a term that is a further
-restriction, you may remove that term. If a license document contains
-a further restriction but permits relicensing or conveying under this
-License, you may add to a covered work material governed by the terms
-of that license document, provided that the further restriction does
-not survive such relicensing or conveying.
-
- If you add terms to a covered work in accord with this section, you
-must place, in the relevant source files, a statement of the
-additional terms that apply to those files, or a notice indicating
-where to find the applicable terms.
-
- Additional terms, permissive or non-permissive, may be stated in the
-form of a separately written license, or stated as exceptions;
-the above requirements apply either way.
-
- 8. Termination.
-
- You may not propagate or modify a covered work except as expressly
-provided under this License. Any attempt otherwise to propagate or
-modify it is void, and will automatically terminate your rights under
-this License (including any patent licenses granted under the third
-paragraph of section 11).
-
- However, if you cease all violation of this License, then your
-license from a particular copyright holder is reinstated (a)
-provisionally, unless and until the copyright holder explicitly and
-finally terminates your license, and (b) permanently, if the copyright
-holder fails to notify you of the violation by some reasonable means
-prior to 60 days after the cessation.
-
- Moreover, your license from a particular copyright holder is
-reinstated permanently if the copyright holder notifies you of the
-violation by some reasonable means, this is the first time you have
-received notice of violation of this License (for any work) from that
-copyright holder, and you cure the violation prior to 30 days after
-your receipt of the notice.
-
- Termination of your rights under this section does not terminate the
-licenses of parties who have received copies or rights from you under
-this License. If your rights have been terminated and not permanently
-reinstated, you do not qualify to receive new licenses for the same
-material under section 10.
-
- 9. Acceptance Not Required for Having Copies.
-
- You are not required to accept this License in order to receive or
-run a copy of the Program. Ancillary propagation of a covered work
-occurring solely as a consequence of using peer-to-peer transmission
-to receive a copy likewise does not require acceptance. However,
-nothing other than this License grants you permission to propagate or
-modify any covered work. These actions infringe copyright if you do
-not accept this License. Therefore, by modifying or propagating a
-covered work, you indicate your acceptance of this License to do so.
-
- 10. Automatic Licensing of Downstream Recipients.
-
- Each time you convey a covered work, the recipient automatically
-receives a license from the original licensors, to run, modify and
-propagate that work, subject to this License. You are not responsible
-for enforcing compliance by third parties with this License.
-
- An "entity transaction" is a transaction transferring control of an
-organization, or substantially all assets of one, or subdividing an
-organization, or merging organizations. If propagation of a covered
-work results from an entity transaction, each party to that
-transaction who receives a copy of the work also receives whatever
-licenses to the work the party's predecessor in interest had or could
-give under the previous paragraph, plus a right to possession of the
-Corresponding Source of the work from the predecessor in interest, if
-the predecessor has it or can get it with reasonable efforts.
-
- You may not impose any further restrictions on the exercise of the
-rights granted or affirmed under this License. For example, you may
-not impose a license fee, royalty, or other charge for exercise of
-rights granted under this License, and you may not initiate litigation
-(including a cross-claim or counterclaim in a lawsuit) alleging that
-any patent claim is infringed by making, using, selling, offering for
-sale, or importing the Program or any portion of it.
-
- 11. Patents.
-
- A "contributor" is a copyright holder who authorizes use under this
-License of the Program or a work on which the Program is based. The
-work thus licensed is called the contributor's "contributor version".
-
- A contributor's "essential patent claims" are all patent claims
-owned or controlled by the contributor, whether already acquired or
-hereafter acquired, that would be infringed by some manner, permitted
-by this License, of making, using, or selling its contributor version,
-but do not include claims that would be infringed only as a
-consequence of further modification of the contributor version. For
-purposes of this definition, "control" includes the right to grant
-patent sublicenses in a manner consistent with the requirements of
-this License.
-
- Each contributor grants you a non-exclusive, worldwide, royalty-free
-patent license under the contributor's essential patent claims, to
-make, use, sell, offer for sale, import and otherwise run, modify and
-propagate the contents of its contributor version.
-
- In the following three paragraphs, a "patent license" is any express
-agreement or commitment, however denominated, not to enforce a patent
-(such as an express permission to practice a patent or covenant not to
-sue for patent infringement). To "grant" such a patent license to a
-party means to make such an agreement or commitment not to enforce a
-patent against the party.
-
- If you convey a covered work, knowingly relying on a patent license,
-and the Corresponding Source of the work is not available for anyone
-to copy, free of charge and under the terms of this License, through a
-publicly available network server or other readily accessible means,
-then you must either (1) cause the Corresponding Source to be so
-available, or (2) arrange to deprive yourself of the benefit of the
-patent license for this particular work, or (3) arrange, in a manner
-consistent with the requirements of this License, to extend the patent
-license to downstream recipients. "Knowingly relying" means you have
-actual knowledge that, but for the patent license, your conveying the
-covered work in a country, or your recipient's use of the covered work
-in a country, would infringe one or more identifiable patents in that
-country that you have reason to believe are valid.
-
- If, pursuant to or in connection with a single transaction or
-arrangement, you convey, or propagate by procuring conveyance of, a
-covered work, and grant a patent license to some of the parties
-receiving the covered work authorizing them to use, propagate, modify
-or convey a specific copy of the covered work, then the patent license
-you grant is automatically extended to all recipients of the covered
-work and works based on it.
-
- A patent license is "discriminatory" if it does not include within
-the scope of its coverage, prohibits the exercise of, or is
-conditioned on the non-exercise of one or more of the rights that are
-specifically granted under this License. You may not convey a covered
-work if you are a party to an arrangement with a third party that is
-in the business of distributing software, under which you make payment
-to the third party based on the extent of your activity of conveying
-the work, and under which the third party grants, to any of the
-parties who would receive the covered work from you, a discriminatory
-patent license (a) in connection with copies of the covered work
-conveyed by you (or copies made from those copies), or (b) primarily
-for and in connection with specific products or compilations that
-contain the covered work, unless you entered into that arrangement,
-or that patent license was granted, prior to 28 March 2007.
-
- Nothing in this License shall be construed as excluding or limiting
-any implied license or other defenses to infringement that may
-otherwise be available to you under applicable patent law.
-
- 12. No Surrender of Others' Freedom.
-
- If conditions are imposed on you (whether by court order, agreement or
-otherwise) that contradict the conditions of this License, they do not
-excuse you from the conditions of this License. If you cannot convey a
-covered work so as to satisfy simultaneously your obligations under this
-License and any other pertinent obligations, then as a consequence you may
-not convey it at all. For example, if you agree to terms that obligate you
-to collect a royalty for further conveying from those to whom you convey
-the Program, the only way you could satisfy both those terms and this
-License would be to refrain entirely from conveying the Program.
-
- 13. Use with the GNU Affero General Public License.
-
- Notwithstanding any other provision of this License, you have
-permission to link or combine any covered work with a work licensed
-under version 3 of the GNU Affero General Public License into a single
-combined work, and to convey the resulting work. The terms of this
-License will continue to apply to the part which is the covered work,
-but the special requirements of the GNU Affero General Public License,
-section 13, concerning interaction through a network will apply to the
-combination as such.
-
- 14. Revised Versions of this License.
-
- The Free Software Foundation may publish revised and/or new versions of
-the GNU General Public License from time to time. Such new versions will
-be similar in spirit to the present version, but may differ in detail to
-address new problems or concerns.
-
- Each version is given a distinguishing version number. If the
-Program specifies that a certain numbered version of the GNU General
-Public License "or any later version" applies to it, you have the
-option of following the terms and conditions either of that numbered
-version or of any later version published by the Free Software
-Foundation. If the Program does not specify a version number of the
-GNU General Public License, you may choose any version ever published
-by the Free Software Foundation.
-
- If the Program specifies that a proxy can decide which future
-versions of the GNU General Public License can be used, that proxy's
-public statement of acceptance of a version permanently authorizes you
-to choose that version for the Program.
-
- Later license versions may give you additional or different
-permissions. However, no additional obligations are imposed on any
-author or copyright holder as a result of your choosing to follow a
-later version.
-
- 15. Disclaimer of Warranty.
-
- THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
-APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
-HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
-OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
-PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
-IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
-ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
-
- 16. Limitation of Liability.
-
- IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
-WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
-THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
-GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
-USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
-DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
-PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
-EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
-SUCH DAMAGES.
-
- 17. Interpretation of Sections 15 and 16.
-
- If the disclaimer of warranty and limitation of liability provided
-above cannot be given local legal effect according to their terms,
-reviewing courts shall apply local law that most closely approximates
-an absolute waiver of all civil liability in connection with the
-Program, unless a warranty or assumption of liability accompanies a
-copy of the Program in return for a fee.
-
- END OF TERMS AND CONDITIONS
-
- How to Apply These Terms to Your New Programs
-
- If you develop a new program, and you want it to be of the greatest
-possible use to the public, the best way to achieve this is to make it
-free software which everyone can redistribute and change under these terms.
-
- To do so, attach the following notices to the program. It is safest
-to attach them to the start of each source file to most effectively
-state the exclusion of warranty; and each file should have at least
-the "copyright" line and a pointer to where the full notice is found.
-
- <one line to give the program's name and a brief idea of what it does.>
- Copyright (C) <year> <name of author>
-
- This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program. If not, see <http://www.gnu.org/licenses/>.
-
-Also add information on how to contact you by electronic and paper mail.
-
- If the program does terminal interaction, make it output a short
-notice like this when it starts in an interactive mode:
-
- <program> Copyright (C) <year> <name of author>
- This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
- This is free software, and you are welcome to redistribute it
- under certain conditions; type `show c' for details.
-
-The hypothetical commands `show w' and `show c' should show the appropriate
-parts of the General Public License. Of course, your program's commands
-might be different; for a GUI interface, you would use an "about box".
-
- You should also get your employer (if you work as a programmer) or school,
-if any, to sign a "copyright disclaimer" for the program, if necessary.
-For more information on this, and how to apply and follow the GNU GPL, see
-<http://www.gnu.org/licenses/>.
-
- The GNU General Public License does not permit incorporating your program
-into proprietary programs. If your program is a subroutine library, you
-may consider it more useful to permit linking proprietary applications with
-the library. If this is what you want to do, use the GNU Lesser General
-Public License instead of this License. But first, please read
-<http://www.gnu.org/philosophy/why-not-lgpl.html>.
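The "How to Apply These Terms" appendix above is the only procedural part of the licence text. As a purely illustrative sketch, the prescribed per-file notice would look like this at the top of a Python module; the elided placeholders from the template are left exactly as written, and nothing here is taken from the deleted tree.

    # <one line to give the program's name and a brief idea of what it does.>
    # Copyright (C) <year> <name of author>
    #
    # This program is free software: you can redistribute it and/or modify
    # it under the terms of the GNU General Public License as published by
    # the Free Software Foundation, either version 3 of the License, or
    # (at your option) any later version.
    #
    # This program is distributed in the hope that it will be useful,
    # but WITHOUT ANY WARRANTY; without even the implied warranty of
    # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    # GNU General Public License for more details.
    #
    # You should have received a copy of the GNU General Public License
    # along with this program.  If not, see <http://www.gnu.org/licenses/>.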
diff --git a/docs/Makefile b/docs/Makefile
deleted file mode 100644
index 16aa258b..00000000
--- a/docs/Makefile
+++ /dev/null
@@ -1,153 +0,0 @@
-# Makefile for Sphinx documentation
-#
-
-# You can set these variables from the command line.
-SPHINXOPTS =
-SPHINXBUILD = sphinx-build
-PAPER =
-BUILDDIR = _build
-
-# Internal variables.
-PAPEROPT_a4 = -D latex_paper_size=a4
-PAPEROPT_letter = -D latex_paper_size=letter
-ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
-# the i18n builder cannot share the environment and doctrees with the others
-I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
-
-.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
-
-help:
- @echo "Please use \`make <target>' where <target> is one of"
- @echo " html to make standalone HTML files"
- @echo " dirhtml to make HTML files named index.html in directories"
- @echo " singlehtml to make a single large HTML file"
- @echo " pickle to make pickle files"
- @echo " json to make JSON files"
- @echo " htmlhelp to make HTML files and a HTML help project"
- @echo " qthelp to make HTML files and a qthelp project"
- @echo " devhelp to make HTML files and a Devhelp project"
- @echo " epub to make an epub"
- @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
- @echo " latexpdf to make LaTeX files and run them through pdflatex"
- @echo " text to make text files"
- @echo " man to make manual pages"
- @echo " texinfo to make Texinfo files"
- @echo " info to make Texinfo files and run them through makeinfo"
- @echo " gettext to make PO message catalogs"
- @echo " changes to make an overview of all changed/added/deprecated items"
- @echo " linkcheck to check all external links for integrity"
- @echo " doctest to run all doctests embedded in the documentation (if enabled)"
-
-clean:
- -rm -rf $(BUILDDIR)/*
-
-html:
- $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
- @echo
- @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
-
-dirhtml:
- $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
- @echo
- @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
-
-singlehtml:
- $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
- @echo
- @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
-
-pickle:
- $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
- @echo
- @echo "Build finished; now you can process the pickle files."
-
-json:
- $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
- @echo
- @echo "Build finished; now you can process the JSON files."
-
-htmlhelp:
- $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
- @echo
- @echo "Build finished; now you can run HTML Help Workshop with the" \
- ".hhp project file in $(BUILDDIR)/htmlhelp."
-
-qthelp:
- $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
- @echo
- @echo "Build finished; now you can run "qcollectiongenerator" with the" \
- ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
- @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/LEAP.qhcp"
- @echo "To view the help file:"
- @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/LEAP.qhc"
-
-devhelp:
- $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
- @echo
- @echo "Build finished."
- @echo "To view the help file:"
- @echo "# mkdir -p $$HOME/.local/share/devhelp/LEAP"
- @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/LEAP"
- @echo "# devhelp"
-
-epub:
- $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
- @echo
- @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
-
-latex:
- $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
- @echo
- @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
- @echo "Run \`make' in that directory to run these through (pdf)latex" \
- "(use \`make latexpdf' here to do that automatically)."
-
-latexpdf:
- $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
- @echo "Running LaTeX files through pdflatex..."
- $(MAKE) -C $(BUILDDIR)/latex all-pdf
- @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
-
-text:
- $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
- @echo
- @echo "Build finished. The text files are in $(BUILDDIR)/text."
-
-man:
- $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
- @echo
- @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
-
-texinfo:
- $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
- @echo
- @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
- @echo "Run \`make' in that directory to run these through makeinfo" \
- "(use \`make info' here to do that automatically)."
-
-info:
- $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
- @echo "Running Texinfo files through makeinfo..."
- make -C $(BUILDDIR)/texinfo info
- @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
-
-gettext:
- $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
- @echo
- @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
-
-changes:
- $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
- @echo
- @echo "The overview file is in $(BUILDDIR)/changes."
-
-linkcheck:
- $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
- @echo
- @echo "Link check complete; look for any errors in the above output " \
- "or in $(BUILDDIR)/linkcheck/output.txt."
-
-doctest:
- $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
- @echo "Testing of doctests in the sources finished, look at the " \
- "results in $(BUILDDIR)/doctest/output.txt."
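Every target in the Makefile above is a thin wrapper around a single sphinx-build invocation; the html target, for instance, expands ALLSPHINXOPTS into the command reproduced below. A sketch of the same call driven from Python, assuming sphinx-build is installed and on the PATH and that the working directory is the Sphinx source directory (docs/):

    # Equivalent of `make html` from the deleted docs/Makefile, expressed as
    # a plain subprocess call.  Assumes sphinx-build is on the PATH and the
    # current directory is the Sphinx source directory (docs/).
    import subprocess

    BUILDDIR = "_build"

    subprocess.check_call([
        "sphinx-build",
        "-b", "html",                      # builder selected by the html: target
        "-d", BUILDDIR + "/doctrees",      # doctree cache from ALLSPHINXOPTS
        ".",                               # source directory
        BUILDDIR + "/html",                # output directory
    ])
    print("Build finished. The HTML pages are in %s/html." % BUILDDIR)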
diff --git a/docs/api/leap.base.rst b/docs/api/leap.base.rst
deleted file mode 100644
index 778836c4..00000000
--- a/docs/api/leap.base.rst
+++ /dev/null
@@ -1,98 +0,0 @@
-base Package
-============
-
-:mod:`auth` Module
-------------------
-
-.. automodule:: leap.base.auth
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`authentication` Module
-----------------------------
-
-.. automodule:: leap.base.authentication
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`checks` Module
---------------------
-
-.. automodule:: leap.base.checks
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`config` Module
---------------------
-
-.. automodule:: leap.base.config
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`connection` Module
-------------------------
-
-.. automodule:: leap.base.connection
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`constants` Module
------------------------
-
-.. automodule:: leap.base.constants
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`exceptions` Module
-------------------------
-
-.. automodule:: leap.base.exceptions
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`network` Module
----------------------
-
-.. automodule:: leap.base.network
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`pluggableconfig` Module
------------------------------
-
-.. automodule:: leap.base.pluggableconfig
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`providers` Module
------------------------
-
-.. automodule:: leap.base.providers
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`specs` Module
--------------------
-
-.. automodule:: leap.base.specs
- :members:
- :undoc-members:
- :show-inheritance:
-
-Subpackages
------------
-
-.. toctree::
-
- leap.base.tests
-
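The stub above, like the ones that follow, only declares section headings and `.. automodule::` directives; Sphinx's autodoc extension imports each named module at build time and renders whatever docstrings it finds, and `:undoc-members:` lists members even without one. A sketch of a module shaped the way these stubs expect; the module and function names are hypothetical and not taken from the deleted leap packages.

    # Hypothetical module used only to illustrate what the automodule stubs
    # above render; nothing here comes from the deleted leap packages.
    """One-line module summary shown under the module heading.

    The rest of this docstring becomes the module description in the built
    HTML, because the stub passes :members: and :show-inheritance:.
    """


    def documented(value):
        """Appears with this docstring thanks to the :members: option."""
        return value


    def undocumented(value):
        # No docstring, but still listed because of :undoc-members:.
        return value

Stubs of this shape are what a tool such as sphinx-apidoc typically generates, although the commit itself does not record how these files were produced.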
diff --git a/docs/api/leap.base.tests.rst b/docs/api/leap.base.tests.rst
deleted file mode 100644
index 7af035b0..00000000
--- a/docs/api/leap.base.tests.rst
+++ /dev/null
@@ -1,43 +0,0 @@
-tests Package
-=============
-
-:mod:`test_auth` Module
------------------------
-
-.. automodule:: leap.base.tests.test_auth
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`test_checks` Module
--------------------------
-
-.. automodule:: leap.base.tests.test_checks
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`test_config` Module
--------------------------
-
-.. automodule:: leap.base.tests.test_config
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`test_providers` Module
-----------------------------
-
-.. automodule:: leap.base.tests.test_providers
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`test_validation` Module
------------------------------
-
-.. automodule:: leap.base.tests.test_validation
- :members:
- :undoc-members:
- :show-inheritance:
-
diff --git a/docs/api/leap.baseapp.rst b/docs/api/leap.baseapp.rst
deleted file mode 100644
index f24b4976..00000000
--- a/docs/api/leap.baseapp.rst
+++ /dev/null
@@ -1,75 +0,0 @@
-baseapp Package
-===============
-
-:mod:`constants` Module
------------------------
-
-.. automodule:: leap.baseapp.constants
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`dialogs` Module
----------------------
-
-.. automodule:: leap.baseapp.dialogs
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`eip` Module
------------------
-
-.. automodule:: leap.baseapp.eip
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`leap_app` Module
-----------------------
-
-.. automodule:: leap.baseapp.leap_app
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`log` Module
------------------
-
-.. automodule:: leap.baseapp.log
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`mainwindow` Module
-------------------------
-
-.. automodule:: leap.baseapp.mainwindow
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`network` Module
----------------------
-
-.. automodule:: leap.baseapp.network
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`permcheck` Module
------------------------
-
-.. automodule:: leap.baseapp.permcheck
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`systray` Module
----------------------
-
-.. automodule:: leap.baseapp.systray
- :members:
- :undoc-members:
- :show-inheritance:
-
diff --git a/docs/api/leap.certs.rst b/docs/api/leap.certs.rst
deleted file mode 100644
index e9cc2524..00000000
--- a/docs/api/leap.certs.rst
+++ /dev/null
@@ -1,11 +0,0 @@
-certs Package
-=============
-
-:mod:`certs` Package
---------------------
-
-.. automodule:: leap.certs
- :members:
- :undoc-members:
- :show-inheritance:
-
diff --git a/docs/api/leap.crypto.rst b/docs/api/leap.crypto.rst
deleted file mode 100644
index a04e2e1d..00000000
--- a/docs/api/leap.crypto.rst
+++ /dev/null
@@ -1,26 +0,0 @@
-crypto Package
-==============
-
-:mod:`certs` Module
--------------------
-
-.. automodule:: leap.crypto.certs
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`leapkeyring` Module
--------------------------
-
-.. automodule:: leap.crypto.leapkeyring
- :members:
- :undoc-members:
- :show-inheritance:
-
-Subpackages
------------
-
-.. toctree::
-
- leap.crypto.tests
-
diff --git a/docs/api/leap.crypto.tests.rst b/docs/api/leap.crypto.tests.rst
deleted file mode 100644
index 54ffa62f..00000000
--- a/docs/api/leap.crypto.tests.rst
+++ /dev/null
@@ -1,11 +0,0 @@
-tests Package
-=============
-
-:mod:`test_certs` Module
-------------------------
-
-.. automodule:: leap.crypto.tests.test_certs
- :members:
- :undoc-members:
- :show-inheritance:
-
diff --git a/docs/api/leap.eip.rst b/docs/api/leap.eip.rst
deleted file mode 100644
index e418461b..00000000
--- a/docs/api/leap.eip.rst
+++ /dev/null
@@ -1,74 +0,0 @@
-eip Package
-===========
-
-:mod:`checks` Module
---------------------
-
-.. automodule:: leap.eip.checks
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`config` Module
---------------------
-
-.. automodule:: leap.eip.config
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`constants` Module
------------------------
-
-.. automodule:: leap.eip.constants
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`eipconnection` Module
----------------------------
-
-.. automodule:: leap.eip.eipconnection
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`exceptions` Module
-------------------------
-
-.. automodule:: leap.eip.exceptions
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`openvpnconnection` Module
--------------------------------
-
-.. automodule:: leap.eip.openvpnconnection
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`specs` Module
--------------------
-
-.. automodule:: leap.eip.specs
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`udstelnet` Module
------------------------
-
-.. automodule:: leap.eip.udstelnet
- :members:
- :undoc-members:
- :show-inheritance:
-
-Subpackages
------------
-
-.. toctree::
-
- leap.eip.tests
-
diff --git a/docs/api/leap.eip.tests.rst b/docs/api/leap.eip.tests.rst
deleted file mode 100644
index 932a074f..00000000
--- a/docs/api/leap.eip.tests.rst
+++ /dev/null
@@ -1,43 +0,0 @@
-tests Package
-=============
-
-:mod:`data` Module
-------------------
-
-.. automodule:: leap.eip.tests.data
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`test_checks` Module
--------------------------
-
-.. automodule:: leap.eip.tests.test_checks
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`test_config` Module
--------------------------
-
-.. automodule:: leap.eip.tests.test_config
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`test_eipconnection` Module
---------------------------------
-
-.. automodule:: leap.eip.tests.test_eipconnection
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`test_openvpnconnection` Module
-------------------------------------
-
-.. automodule:: leap.eip.tests.test_openvpnconnection
- :members:
- :undoc-members:
- :show-inheritance:
-
diff --git a/docs/api/leap.gui.firstrun.rst b/docs/api/leap.gui.firstrun.rst
deleted file mode 100644
index 36470c33..00000000
--- a/docs/api/leap.gui.firstrun.rst
+++ /dev/null
@@ -1,91 +0,0 @@
-firstrun Package
-================
-
-:mod:`firstrun` Package
------------------------
-
-.. automodule:: leap.gui.firstrun
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`intro` Module
--------------------
-
-.. automodule:: leap.gui.firstrun.intro
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`last` Module
-------------------
-
-.. automodule:: leap.gui.firstrun.last
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`login` Module
--------------------
-
-.. automodule:: leap.gui.firstrun.login
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`mixins` Module
---------------------
-
-.. automodule:: leap.gui.firstrun.mixins
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`providerinfo` Module
---------------------------
-
-.. automodule:: leap.gui.firstrun.providerinfo
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`providerselect` Module
-----------------------------
-
-.. automodule:: leap.gui.firstrun.providerselect
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`providersetup` Module
----------------------------
-
-.. automodule:: leap.gui.firstrun.providersetup
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`register` Module
-----------------------
-
-.. automodule:: leap.gui.firstrun.register
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`regvalidation` Module
----------------------------
-
-.. automodule:: leap.gui.firstrun.regvalidation
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`wizard` Module
---------------------
-
-.. automodule:: leap.gui.firstrun.wizard
- :members:
- :undoc-members:
- :show-inheritance:
-
diff --git a/docs/api/leap.gui.rst b/docs/api/leap.gui.rst
deleted file mode 100644
index a35a7856..00000000
--- a/docs/api/leap.gui.rst
+++ /dev/null
@@ -1,75 +0,0 @@
-gui Package
-===========
-
-:mod:`gui` Package
-------------------
-
-.. automodule:: leap.gui
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`constants` Module
------------------------
-
-.. automodule:: leap.gui.constants
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`locale_rc` Module
------------------------
-
-.. automodule:: leap.gui.locale_rc
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`mainwindow_rc` Module
----------------------------
-
-.. automodule:: leap.gui.mainwindow_rc
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`progress` Module
-----------------------
-
-.. automodule:: leap.gui.progress
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`styles` Module
---------------------
-
-.. automodule:: leap.gui.styles
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`threads` Module
----------------------
-
-.. automodule:: leap.gui.threads
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`utils` Module
--------------------
-
-.. automodule:: leap.gui.utils
- :members:
- :undoc-members:
- :show-inheritance:
-
-Subpackages
------------
-
-.. toctree::
-
- leap.gui.firstrun
- leap.gui.tests
-
diff --git a/docs/api/leap.gui.tests.rst b/docs/api/leap.gui.tests.rst
deleted file mode 100644
index 60b0a6ca..00000000
--- a/docs/api/leap.gui.tests.rst
+++ /dev/null
@@ -1,59 +0,0 @@
-tests Package
-=============
-
-:mod:`test_firstrun_login` Module
----------------------------------
-
-.. automodule:: leap.gui.tests.test_firstrun_login
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`test_firstrun_providerselect` Module
-------------------------------------------
-
-.. automodule:: leap.gui.tests.test_firstrun_providerselect
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`test_firstrun_register` Module
-------------------------------------
-
-.. automodule:: leap.gui.tests.test_firstrun_register
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`test_firstrun_wizard` Module
-----------------------------------
-
-.. automodule:: leap.gui.tests.test_firstrun_wizard
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`test_mainwindow_rc` Module
---------------------------------
-
-.. automodule:: leap.gui.tests.test_mainwindow_rc
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`test_progress` Module
----------------------------
-
-.. automodule:: leap.gui.tests.test_progress
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`test_threads` Module
---------------------------
-
-.. automodule:: leap.gui.tests.test_threads
- :members:
- :undoc-members:
- :show-inheritance:
-
diff --git a/docs/api/leap.rst b/docs/api/leap.rst
deleted file mode 100644
index ce715c5d..00000000
--- a/docs/api/leap.rst
+++ /dev/null
@@ -1,41 +0,0 @@
-leap Package
-============
-
-:mod:`leap` Package
--------------------
-
-.. automodule:: leap.__init__
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`_version` Module
-----------------------
-
-.. automodule:: leap._version
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`app` Module
------------------
-
-.. automodule:: leap.app
- :members:
- :undoc-members:
- :show-inheritance:
-
-Subpackages
------------
-
-.. toctree::
-
- leap.base
- leap.baseapp
- leap.certs
- leap.crypto
- leap.eip
- leap.gui
- leap.testing
- leap.util
-
diff --git a/docs/api/leap.testing.rst b/docs/api/leap.testing.rst
deleted file mode 100644
index dfad1b0c..00000000
--- a/docs/api/leap.testing.rst
+++ /dev/null
@@ -1,43 +0,0 @@
-testing Package
-===============
-
-:mod:`basetest` Module
-----------------------
-
-.. automodule:: leap.testing.basetest
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`https_server` Module
---------------------------
-
-.. automodule:: leap.testing.https_server
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`pyqt` Module
-------------------
-
-.. automodule:: leap.testing.pyqt
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`qunittest` Module
------------------------
-
-.. automodule:: leap.testing.qunittest
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`test_basetest` Module
----------------------------
-
-.. automodule:: leap.testing.test_basetest
- :members:
- :undoc-members:
- :show-inheritance:
-
diff --git a/docs/api/leap.util.rst b/docs/api/leap.util.rst
deleted file mode 100644
index dbb69ebe..00000000
--- a/docs/api/leap.util.rst
+++ /dev/null
@@ -1,66 +0,0 @@
-util Package
-============
-
-:mod:`coroutines` Module
-------------------------
-
-.. automodule:: leap.util.coroutines
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`dicts` Module
--------------------
-
-.. automodule:: leap.util.dicts
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`fileutil` Module
-----------------------
-
-.. automodule:: leap.util.fileutil
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`leap_argparse` Module
----------------------------
-
-.. automodule:: leap.util.leap_argparse
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`misc` Module
-------------------
-
-.. automodule:: leap.util.misc
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`translations` Module
---------------------------
-
-.. automodule:: leap.util.translations
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`web` Module
------------------
-
-.. automodule:: leap.util.web
- :members:
- :undoc-members:
- :show-inheritance:
-
-Subpackages
------------
-
-.. toctree::
-
- leap.util.tests
-
diff --git a/docs/api/leap.util.tests.rst b/docs/api/leap.util.tests.rst
deleted file mode 100644
index eb6654c4..00000000
--- a/docs/api/leap.util.tests.rst
+++ /dev/null
@@ -1,19 +0,0 @@
-tests Package
-=============
-
-:mod:`test_fileutil` Module
----------------------------
-
-.. automodule:: leap.util.tests.test_fileutil
- :members:
- :undoc-members:
- :show-inheritance:
-
-:mod:`test_leap_argparse` Module
---------------------------------
-
-.. automodule:: leap.util.tests.test_leap_argparse
- :members:
- :undoc-members:
- :show-inheritance:
-
diff --git a/docs/api/modules.rst b/docs/api/modules.rst
deleted file mode 100644
index d49776ae..00000000
--- a/docs/api/modules.rst
+++ /dev/null
@@ -1,7 +0,0 @@
-leap
-====
-
-.. toctree::
- :maxdepth: 4
-
- leap
diff --git a/docs/conf.py b/docs/conf.py
deleted file mode 100644
index 05c8cf5b..00000000
--- a/docs/conf.py
+++ /dev/null
@@ -1,242 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# LEAP documentation build configuration file, created by
-# sphinx-quickstart on Sun Jul 22 18:32:05 2012.
-#
-# This file is execfile()d with the current directory set to its containing dir.
-#
-# Note that not all possible configuration values are present in this
-# autogenerated file.
-#
-# All configuration values have a default; values that are commented out
-# serve to show the default.
-
-import sys, os
-
-# If extensions (or modules to document with autodoc) are in another directory,
-# add these directories to sys.path here. If the directory is relative to the
-# documentation root, use os.path.abspath to make it absolute, like shown here.
-#sys.path.insert(0, os.path.abspath('.'))
-
-# -- General configuration -----------------------------------------------------
-
-# If your documentation needs a minimal Sphinx version, state it here.
-#needs_sphinx = '1.0'
-
-# Add any Sphinx extension module names here, as strings. They can be extensions
-# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
-extensions = ['sphinx.ext.autodoc']
-
-# Add any paths that contain templates here, relative to this directory.
-templates_path = ['_templates']
-
-# The suffix of source filenames.
-source_suffix = '.rst'
-
-# The encoding of source files.
-#source_encoding = 'utf-8-sig'
-
-# The master toctree document.
-master_doc = 'index'
-
-# General information about the project.
-project = u'LEAP'
-copyright = u'2012, The LEAP Encryption Access Project'
-
-# The version info for the project you're documenting, acts as replacement for
-# |version| and |release|, also used in various other places throughout the
-# built documents.
-#
-# The short X.Y version.
-version = '0.1.0'
-# The full version, including alpha/beta/rc tags.
-release = '0.1.0'
-
-# The language for content autogenerated by Sphinx. Refer to documentation
-# for a list of supported languages.
-language = "en_US"
-
-# There are two options for replacing |today|: either, you set today to some
-# non-false value, then it is used:
-#today = ''
-# Else, today_fmt is used as the format for a strftime call.
-#today_fmt = '%B %d, %Y'
-
-# List of patterns, relative to source directory, that match files and
-# directories to ignore when looking for source files.
-exclude_patterns = ['_build']
-
-# The reST default role (used for this markup: `text`) to use for all documents.
-#default_role = None
-
-# If true, '()' will be appended to :func: etc. cross-reference text.
-#add_function_parentheses = True
-
-# If true, the current module name will be prepended to all description
-# unit titles (such as .. function::).
-#add_module_names = True
-
-# If true, sectionauthor and moduleauthor directives will be shown in the
-# output. They are ignored by default.
-#show_authors = False
-
-# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
-
-# A list of ignored prefixes for module index sorting.
-#modindex_common_prefix = []
-
-
-# -- Options for HTML output ---------------------------------------------------
-
-# The theme to use for HTML and HTML Help pages. See the documentation for
-# a list of builtin themes.
-html_theme = 'default'
-
-# Theme options are theme-specific and customize the look and feel of a theme
-# further. For a list of options available for each theme, see the
-# documentation.
-#html_theme_options = {}
-
-# Add any paths that contain custom themes here, relative to this directory.
-#html_theme_path = []
-
-# The name for this set of Sphinx documents. If None, it defaults to
-# "<project> v<release> documentation".
-#html_title = None
-
-# A shorter title for the navigation bar. Default is the same as html_title.
-#html_short_title = None
-
-# The name of an image file (relative to this directory) to place at the top
-# of the sidebar.
-html_logo = "../data/images/leap-color-small.png"
-
-# The name of an image file (within the static path) to use as favicon of the
-# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
-# pixels large.
-html_favicon = "../data/images/favicon.ico"
-
-# Add any paths that contain custom static files (such as style sheets) here,
-# relative to this directory. They are copied after the builtin static files,
-# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
-
-# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
-# using the given strftime format.
-html_last_updated_fmt = '%b %d, %Y'
-
-# If true, SmartyPants will be used to convert quotes and dashes to
-# typographically correct entities.
-#html_use_smartypants = True
-
-# Custom sidebar templates, maps document names to template names.
-#html_sidebars = {}
-
-# Additional templates that should be rendered to pages, maps page names to
-# template names.
-#html_additional_pages = {}
-
-# If false, no module index is generated.
-#html_domain_indices = True
-
-# If false, no index is generated.
-#html_use_index = True
-
-# If true, the index is split into individual pages for each letter.
-#html_split_index = False
-
-# If true, links to the reST sources are added to the pages.
-#html_show_sourcelink = True
-
-# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-#html_show_sphinx = True
-
-# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-#html_show_copyright = True
-
-# If true, an OpenSearch description file will be output, and all pages will
-# contain a <link> tag referring to it. The value of this option must be the
-# base URL from which the finished HTML is served.
-#html_use_opensearch = ''
-
-# This is the file name suffix for HTML files (e.g. ".xhtml").
-#html_file_suffix = None
-
-# Output file base name for HTML help builder.
-htmlhelp_basename = 'LEAPdoc'
-
-
-# -- Options for LaTeX output --------------------------------------------------
-
-latex_elements = {
-# The paper size ('letterpaper' or 'a4paper').
-#'papersize': 'letterpaper',
-
-# The font size ('10pt', '11pt' or '12pt').
-#'pointsize': '10pt',
-
-# Additional stuff for the LaTeX preamble.
-#'preamble': '',
-}
-
-# Grouping the document tree into LaTeX files. List of tuples
-# (source start file, target name, title, author, documentclass [howto/manual]).
-latex_documents = [
- ('index', 'LEAP.tex', u'LEAP Documentation',
- u'The Leap Project', 'manual'),
-]
-
-# The name of an image file (relative to this directory) to place at the top of
-# the title page.
-#latex_logo = None
-
-# For "manual" documents, if this is true, then toplevel headings are parts,
-# not chapters.
-#latex_use_parts = False
-
-# If true, show page references after internal links.
-#latex_show_pagerefs = False
-
-# If true, show URL addresses after external links.
-#latex_show_urls = False
-
-# Documents to append as an appendix to all manuals.
-#latex_appendices = []
-
-# If false, no module index is generated.
-#latex_domain_indices = True
-
-
-# -- Options for manual page output --------------------------------------------
-
-# One entry per manual page. List of tuples
-# (source start file, name, description, authors, manual section).
-man_pages = [
- ('index', 'leap', u'LEAP Documentation',
- [u'The Leap Project'], 1)
-]
-
-# If true, show URL addresses after external links.
-#man_show_urls = False
-
-
-# -- Options for Texinfo output ------------------------------------------------
-
-# Grouping the document tree into Texinfo files. List of tuples
-# (source start file, target name, title, author,
-# dir menu entry, description, category)
-texinfo_documents = [
- ('index', 'LEAP', u'LEAP Documentation',
- u'The Leap Project', 'LEAP', 'One line description of project.',
- 'Miscellaneous'),
-]
-
-# Documents to append as an appendix to all manuals.
-#texinfo_appendices = []
-
-# If false, no module index is generated.
-#texinfo_domain_indices = True
-
-# How to display URL addresses: 'footnote', 'no', or 'inline'.
-#texinfo_show_urls = 'footnote'
diff --git a/docs/config/files.rst b/docs/config/files.rst
deleted file mode 100644
index 0f4abead..00000000
--- a/docs/config/files.rst
+++ /dev/null
@@ -1,16 +0,0 @@
-.. _files:
-
-Configuration Files
-===================
-
-This document covers the different configuration files used by the LEAP Client.
-
-leap.conf
----------
-
-TBD
-
-eip.json
---------
-
-TBD
diff --git a/docs/dev/authors.rst b/docs/dev/authors.rst
deleted file mode 100644
index e69de29b..00000000
--- a/docs/dev/authors.rst
+++ /dev/null
diff --git a/docs/dev/environment.rst b/docs/dev/environment.rst
deleted file mode 100644
index c3868b81..00000000
--- a/docs/dev/environment.rst
+++ /dev/null
@@ -1,121 +0,0 @@
-.. _environment:
-
-Setting up a development environment
-====================================
-
-This document covers how to get an environment ready to contribute code to the LEAP Client.
-
-Cloning the repo
-----------------
-.. note::
- Stable releases will be in *master* branch (nothing there yet, move on!).
- Development code lives in *develop* branch.
-
-::
-
- git clone git://leap.se/leap_client
-
-Base Dependencies
-------------------
-The LEAP Client depends on these libraries:
-
-* `python 2.6 or 2.7`
-* `qt4` libraries (see also :ref:`Troubleshooting PyQt install <pyqtvirtualenv>` about how to install inside your virtualenv)
-* `openssl`
-* `openvpn <http://openvpn.net/index.php/open-source/345-openvpn-project.html>`_
-
-Debian
-^^^^^^
-In debian-based systems::
-
- $ apt-get install openvpn python-qt4 python-crypto python-openssl
-
-To install the software from sources::
-
- $ apt-get install python-pip python-dev
-
-.. _virtualenv:
-
-Working with virtualenv
------------------------
-
-Intro
-^^^^^^^^^^^^^^^^^^^
-
-*Virtualenv* is the *Virtual Python Environment builder*.
-
-It is a tool to create isolated Python environments.
-
-The basic problem being addressed is one of dependencies and versions, and indirectly permissions. Imagine you have an application that needs version 1 of LibFoo, but another application requires version 2. How can you use both these applications? If you install everything into /usr/lib/python2.7/site-packages (or whatever your platform's standard location is), it's easy to end up in a situation where you unintentionally upgrade an application that shouldn't be upgraded.
-
-Read more about it in the `project documentation page <http://pypi.python.org/pypi/virtualenv/>`_.
-
-
-Create and activate your dev environment
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-::
-
- $ virtualenv </path/to/new/environment>
- $ source </path/to/new/environment>/bin/activate
-
-Install python dependencies
-^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-You can install python dependencies with pip. If you do it inside your working environment, they will be installed avoiding the need for administrative permissions::
-
- $ pip install -r pkg/requirements.pip
-
-.. _pyqtvirtualenv:
-
-Troubleshooting PyQt install inside a virtualenv
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-If you attempt to install PyQt inside a virtualenv using pip, it will fail because PyQt4 does not use the standard setup.py mechanism.
-
-As a workaround, you can run the following script after creating your virtualenv. It will symlink to your global PyQt installation (*this is the recommended way if you are running a debian-based system*)::
-
- $ pkg/postmkvenv.sh
-
-A second option if that does not work for you would be to install PyQt globally and pass the ``--site-packages`` option when you are creating your virtualenv::
-
- $ apt-get install python-qt4
- $ virtualenv --site-packages .
-
-Or, if you prefer, you can also `download the official PyQt tarball <http://www.riverbankcomputing.com/software/pyqt/download>`_ and execute ``configure.py`` in the root folder of their distribution, which generates a ``Makefile``::
-
- $ python configure.py
- $ make && make install
-
-.. note::
- this section could be completed with useful options that can be passed to the virtualenv command (e.g., to make portable paths, site-packages, ...).
-
-
-.. _copyscriptfiles:
-
-Copy script files
------------------
-
-The openvpn invocation expects some files to be in place. If you have not installed `leap-client` from a debian package, you must copy these files manually::
-
- $ sudo mkdir -p /etc/leap
- $ sudo cp pkg/linux/resolv-update /etc/leap
-
-.. _policykit:
-
-Running openvpn without root privileges
----------------------------------------
-
-In linux, we use ``policykit`` to run openvpn without root privileges, and a policy file needs to be installed for that to be possible.
-The setup script tries to install the policy file when installing the client system-wide, so if you have installed the client in your global site-packages at least once it should have copied this file for you.
-
-If you are *only* running the client from inside a virtualenv, you will need to copy this file by hand::
-
- $ sudo cp pkg/linux/polkit/net.openvpn.gui.leap.policy /usr/share/polkit-1/actions/
-
-
-Missing Authentication agent
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-If you are running a desktop other than gnome or unity, you might get an error saying that you are not running the authentication agent. You can launch it like this::
-
- /usr/lib/policykit-1-gnome/polkit-gnome-authentication-agent-1 &
diff --git a/docs/dev/internals.rst b/docs/dev/internals.rst
deleted file mode 100644
index 8bb19211..00000000
--- a/docs/dev/internals.rst
+++ /dev/null
@@ -1,12 +0,0 @@
-.. _internals:
-
-Internals
-=========
-
-This section briefly covers the internal organization of the LEAP Client source tree.
-
-.. note::
-
- very unfinished.
-
-`TBD`
diff --git a/docs/dev/internationalization.rst b/docs/dev/internationalization.rst
deleted file mode 100644
index 1a9af0be..00000000
--- a/docs/dev/internationalization.rst
+++ /dev/null
@@ -1,117 +0,0 @@
-.. _i18n:
-
-Internationalization
-====================
-
-This part of the documentation covers the localization and translation of LEAP Client.
-Because we want to *bring fire to the people*, in as many countries and languages as possible.
-
-Translating the LEAP Client PyQt Application
---------------------------------------------
-
-.. raw:: html
-
- <div><a target="_blank" style="text-decoration:none; color:black; font-size:66%" href="https://www.transifex.com/projects/p/leap-client/resource/leap-client/" title="See more information on Transifex.com">Top translations: leap-client » leap-client</a><br/><img border="0" src="https://www.transifex.com/projects/p/leap-client/resource/leap-client/chart/image_png"/><br/><a target="_blank" href="https://www.transifex.com/"><img border="0" src="https://ds0k0en9abmn1.cloudfront.net/static/charts/images/tx-logo-micro.646b0065fce6.png"/></a></div>
-
-
-For translators
-^^^^^^^^^^^^^^^
-.. note::
- We should probably move the translators info to a top level section of the docs, and leave this
- as internal notes.
-
-
-We are using `transifex <http://transifex.com/projects/p/leap-client>`_ to coordinate translation efforts. If you want to contribute, just sign up there and ...
-
-.. note::
- ... and what??
-
-For devs: i18n conventions
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-.. note::
- should say something about our special cases (provider labels and exceptions) when we get decision about it.
-
-Refer to `pyqt documentation <http://www.riverbankcomputing.co.uk/static/Docs/PyQt4/html/i18n.html>`_.
-
-tl;dr::
-
- self.tr('your string')
-
-for any string that you want to be translated, as long as the instance derives from ``QObject``.
-
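-For instance, a minimal sketch inside a ``QObject``-derived widget (the class and the string are purely illustrative, not part of the client code):
-
-.. code-block:: python
-
-    from PyQt4 import QtGui
-
-    class GreetingDialog(QtGui.QDialog):
-        def __init__(self, parent=None):
-            QtGui.QDialog.__init__(self, parent)
-            # self.tr() marks the string for pylupdate4 and looks up the
-            # translation at runtime, using the class name as the context.
-            self.setWindowTitle(self.tr("Hello, translated world!"))
-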
-If you have to translate something that is not a ``QObject``, use the magic leap ``translate`` method:
-
-
-.. code-block:: python
-
- from leap.util.translations import translate
-
- class Foo(object):
- bar = translate(<Context>, <string>, <comment>)
-
-
-.. Note about this: there seems to be some problems with the .tr method
- so the translate method could actually be the preferred thing in all the cases.
- Still missing what to do for language labels (json-based).
- --kali
-
-For i18n maintainers
-^^^^^^^^^^^^^^^^^^^^
-
-You need ``pylupdate4`` and ``lrelease`` for these steps. To get them, on debian::
-
- $ apt-get install pyqt4-dev-tools qt4-linguist-tools
-
-If you do not already have it, install the ``transifex-client`` from the cheese shop::
-
- pip install transifex-client
-
-You can learn more about the transifex-client `here <http://help.transifex.com/features/client/index.html>`_.
-
-**1.** Add any new source files to the project file, ``data/leap_client.pro``. *We should automate this with some templating, it's tedious.*
-
-**2.** Update the source .ts file ``data/ts/en_US.ts``::
-
- $ make translations
-
-**3.** Push source .ts file to transifex::
-
- $ tx push -s
-
-**4.** Let the translation fairies do their work...
-
-**5.** *Et voila!* Get updated .ts files for each language from ``Transifex``. For instance, to pull updated spanish translations::
-
- $ tx pull -l es
- Pulling new translations for resource leap-client.leap-client (source: data/ts/en_US.ts)
- -> es: data/translations/es.ts
- Done.
-
-
-Note that there is a configuration option in ``.tx/config`` for setting the minimum completion percentage needed to be able to actually pull a resource.
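-
-As a rough sketch, the resource stanza could look like this (the keys shown and the threshold value are illustrative; the resource slug is the one from the pull output above)::
-
-    [leap-client.leap-client]
-    file_filter = data/translations/<lang>.ts
-    source_file = data/ts/en_US.ts
-    source_lang = en
-    minimum_perc = 90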
-
-**6.** Generate .qm files from the updated .ts files::
-
- $ make translations
-
-and yes, it's the same command as in step 2. One less thing to remember :)
-
-**7.** Check that the .qm file for the language you're working with is listed in ``data/resources/locale.qrc``. It should pick up the translated files from ``data/translations``.
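-
-A Spanish entry would look roughly like this (the prefix and alias are just an illustration of the Qt resource collection format)::
-
-    <RCC>
-      <qresource prefix="/translations">
-        <file alias="es">../translations/es.qm</file>
-      </qresource>
-    </RCC>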
-
-**8.** Re-generate ``src/leap/gui/locale_qrc``. This is the embedded resource file that we load in the main app entry point, and from which we load the data for the Qt translator object::
-
- $ make resources
-
-If you want to try it, just set your LANG environment variable::
-
- $ LANG=es_ES leap-client
-
-
-Translating the Documentation
-------------------------------
-
-.. note::
- ...unfinished
-
-`translating sphinx docs <http://sphinx-doc.org/intl.html>`_
diff --git a/docs/dev/resources.rst b/docs/dev/resources.rst
deleted file mode 100644
index 7cfa2b70..00000000
--- a/docs/dev/resources.rst
+++ /dev/null
@@ -1,14 +0,0 @@
-.. _resources:
-
-PyQt Resource files
-===================
-
-Compiling resource/ui files
----------------------------
-
-You should refresh resource/ui files every time you change an image or a resource/ui file (.ui / .qrc). From the root folder::
-
- % make ui
- % make resources
-
-As there are some tests to guard against unwanted resource updates, you will have to update the resource hash in those failing tests.
diff --git a/docs/dev/tests.rst b/docs/dev/tests.rst
deleted file mode 100644
index 7f5fbaaf..00000000
--- a/docs/dev/tests.rst
+++ /dev/null
@@ -1,62 +0,0 @@
-.. _tests:
-
-Running and writing tests
-=========================
-
-.. note::
- should include seeAlso to virtualenv
-
-This section covers the documentation about the tests for the LEAP Client code.
-All patches should have tests for them ...
-
-
-Testing dependencies
---------------------
-
-Have a look at ``pkg/test-requirements.pip``.
-The ``./run_tests.sh`` command should install all of them in your virtualenv for you.
-
-If you prefer to install them system-wide, this should do on a debian system::
-
- $ apt-get install python-nose python-mock python-coverage
-
-
-Running tests
--------------
-
-There is a convenience script at ``./run_tests.sh``
-
-If you want to run specific tests, pass the (sub)module to nose::
-
- $ nosetests leap.util
-
-or::
-
- $ nosetests leap.util.tests.test_leap_argparse
-
-Hint: colorized output
-^^^^^^^^^^^^^^^^^^^^^^
-
-Install ``rednose`` locally, export the ``NOSE_REDNOSE`` variable, and give your eyes a rest :)::
-
- (leap_client)% pip install rednose
- (leap_client)% export NOSE_REDNOSE=1
-
-Testing all the supported python versions
------------------------------------------
-
-To run the test suite against all the supported Python versions (currently 2.6 and 2.7), use::
-
- % tox -v
-
-Coverage reports
-----------------
-
-Pass the ``-c`` flag to the ``run_tests.sh`` script::
-
- $ run_tests.sh -c
-
-Using ``coverage``, it will generate beautiful html reports that you can access by pointing your browser to ``docs/covhtml/index.html``.
-
-.. note::
-   The coverage reports will not be generated unless all tests pass.
diff --git a/docs/dev/todo.rst b/docs/dev/todo.rst
deleted file mode 100644
index e69de29b..00000000
--- a/docs/dev/todo.rst
+++ /dev/null
diff --git a/docs/dev/workflow.rst b/docs/dev/workflow.rst
deleted file mode 100644
index 5ceccca4..00000000
--- a/docs/dev/workflow.rst
+++ /dev/null
@@ -1,41 +0,0 @@
-.. _workflow:
-
-Development Workflow
-====================
-
-This section documents the workflow that the LEAP project team follows and expects for the code contributions.
-
-Code formatting
----------------
-In one word: `PEP8`_.
-
-`autopep8`_ might be your friend. Or eat your code.
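-
-A typical invocation looks like this (the target path is just an example)::
-
-    $ pip install autopep8
-    $ autopep8 --in-place --aggressive src/leap/util/misc.py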
-
-.. _`PEP8`: http://www.python.org/dev/peps/pep-0008/
-.. _`autopep8`: http://pypi.python.org/pypi/autopep8
-
-Dependencies
-------------
-If you introduce a new dependency, please add it under ``pkg/requirements`` or ``pkg/test-requirements`` as appropriate, under the proper module section.
-
-Git flow
---------
-See `A successful git branching model <http://nvie.com/posts/a-successful-git-branching-model/>`_ for more information. The slight modification we make is that release tags are made in the release branch before getting merged to master, rather than getting tagged in master.
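-
-In practice, that means something along these lines (the branch and version names are only an illustration)::
-
-    $ git checkout release-0.2.0
-    $ git tag -a 0.2.0 -m "Release 0.2.0"
-    $ git checkout master
-    $ git merge --no-ff release-0.2.0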
-
-.. image:: https://leap.se/code/attachments/13/git-branching-model.png
-
-The author of the aforementioned post has also a handy pdf version of it: `branching_model.pdf`_
-
-A couple of tools that help to follow this process are `git-flow`_ and `git-sweep`_.
-
-.. _`branching_model.pdf`: https://leap.se/code/attachments/14/Git-branching-model.pdf
-.. _`git-flow`: https://github.com/nvie/gitflow
-.. _`git-sweep`: http://pypi.python.org/pypi/git-sweep
-
-Merge into integration branch
------------------------------
-All code ready to be merged into the integration branch is expected to:
-
-* Have tests
-* Be documented
-* Pass existing tests: do **run_tests.sh** and **tox -v**. All feature branches are automagically built by our `buildbot farm <http://lemur.leap.se:8010/grid>`_. So please check your branch is green before merging it into `develop`. Rebasing against the current tip of the integration branch when possible is preferred in order to keep a clean history.
diff --git a/docs/index.rst b/docs/index.rst
deleted file mode 100644
index 3e1a603f..00000000
--- a/docs/index.rst
+++ /dev/null
@@ -1,92 +0,0 @@
-.. LEAP documentation master file, created by
- sphinx-quickstart on Sun Jul 22 18:32:05 2012.
- You can adapt this file completely to your liking, but it should at least
- contain the root `toctree` directive.
-
-LEAP Client
-=====================================
-
-Release v\ |version|. (`Impatient? jump to the` :ref:`Installation <install>` `section!`)
-
-.. if you change this paragraph, change it in user/intro too
-The **LEAP Encryption Access Project Client** is a :ref:`GPL3 Licensed <gpl3>` multiplatform client, written in Python using PyQt4, that supports the features offered by :ref:`the LEAP Platform <leapplatform>`. It is currently being tested on Linux; support for OSX and Windows will come soon.
-
-User Guide
-----------
-
-.. toctree::
- :maxdepth: 2
-
- user/intro
- user/install
- user/running
-
-Tester Guide
-------------
-
-This part of the documentation details how to fetch the latest development version and how to report bugs.
-
-.. toctree::
- :maxdepth: 1
-
- testers/howto
-
-Hackers Guide
----------------
-
-If you want to contribute to the project, we wrote this for you.
-
-.. toctree::
- :maxdepth: 1
-
- dev/environment
- dev/tests
- dev/workflow
- dev/resources
- dev/internationalization
-
-.. dev/internals
- dev/authors
- dev/todo
- dev/workflow
-
-Packager Guide
----------------
-
-Docs related to the process of building and releasing a version of the client.
-
-.. toctree::
- :maxdepth: 1
-
- pkg/debian
- pkg/osx
- pkg/win
-
-
-Directories and Files
----------------------
-
-Different directories and files used for the configuration of the client.
-
-.. toctree::
- :maxdepth: 1
-
- config/files
-
-
-API Documentation
------------------
-
-If you are looking for a reference to specific classes or functions, you are likely to find it here.
-
-.. I should investigate a bit more how to skip some things, and how to give nice format
- to the docstrings.
- Maybe we should not have sphinx-apidocs building everything, but a minimal index of our own.
-
-.. note::
- when it's a bit more polished, that's it :)
-
-.. toctree::
- :maxdepth: 2
-
- api/leap
diff --git a/docs/make.bat b/docs/make.bat
deleted file mode 100644
index b241ea34..00000000
--- a/docs/make.bat
+++ /dev/null
@@ -1,190 +0,0 @@
-@ECHO OFF
-
-REM Command file for Sphinx documentation
-
-if "%SPHINXBUILD%" == "" (
- set SPHINXBUILD=sphinx-build
-)
-set BUILDDIR=_build
-set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
-set I18NSPHINXOPTS=%SPHINXOPTS% .
-if NOT "%PAPER%" == "" (
- set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
- set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
-)
-
-if "%1" == "" goto help
-
-if "%1" == "help" (
- :help
- echo.Please use `make ^<target^>` where ^<target^> is one of
- echo. html to make standalone HTML files
- echo. dirhtml to make HTML files named index.html in directories
- echo. singlehtml to make a single large HTML file
- echo. pickle to make pickle files
- echo. json to make JSON files
- echo. htmlhelp to make HTML files and a HTML help project
- echo. qthelp to make HTML files and a qthelp project
- echo. devhelp to make HTML files and a Devhelp project
- echo. epub to make an epub
- echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
- echo. text to make text files
- echo. man to make manual pages
- echo. texinfo to make Texinfo files
- echo. gettext to make PO message catalogs
- echo. changes to make an overview over all changed/added/deprecated items
- echo. linkcheck to check all external links for integrity
- echo. doctest to run all doctests embedded in the documentation if enabled
- goto end
-)
-
-if "%1" == "clean" (
- for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
- del /q /s %BUILDDIR%\*
- goto end
-)
-
-if "%1" == "html" (
- %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished. The HTML pages are in %BUILDDIR%/html.
- goto end
-)
-
-if "%1" == "dirhtml" (
- %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
- goto end
-)
-
-if "%1" == "singlehtml" (
- %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
- goto end
-)
-
-if "%1" == "pickle" (
- %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished; now you can process the pickle files.
- goto end
-)
-
-if "%1" == "json" (
- %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished; now you can process the JSON files.
- goto end
-)
-
-if "%1" == "htmlhelp" (
- %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished; now you can run HTML Help Workshop with the ^
-.hhp project file in %BUILDDIR%/htmlhelp.
- goto end
-)
-
-if "%1" == "qthelp" (
- %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished; now you can run "qcollectiongenerator" with the ^
-.qhcp project file in %BUILDDIR%/qthelp, like this:
- echo.^> qcollectiongenerator %BUILDDIR%\qthelp\LEAP.qhcp
- echo.To view the help file:
- echo.^> assistant -collectionFile %BUILDDIR%\qthelp\LEAP.ghc
- goto end
-)
-
-if "%1" == "devhelp" (
- %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished.
- goto end
-)
-
-if "%1" == "epub" (
- %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished. The epub file is in %BUILDDIR%/epub.
- goto end
-)
-
-if "%1" == "latex" (
- %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
- goto end
-)
-
-if "%1" == "text" (
- %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished. The text files are in %BUILDDIR%/text.
- goto end
-)
-
-if "%1" == "man" (
- %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished. The manual pages are in %BUILDDIR%/man.
- goto end
-)
-
-if "%1" == "texinfo" (
- %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
- goto end
-)
-
-if "%1" == "gettext" (
- %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
- if errorlevel 1 exit /b 1
- echo.
- echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
- goto end
-)
-
-if "%1" == "changes" (
- %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
- if errorlevel 1 exit /b 1
- echo.
- echo.The overview file is in %BUILDDIR%/changes.
- goto end
-)
-
-if "%1" == "linkcheck" (
- %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
- if errorlevel 1 exit /b 1
- echo.
- echo.Link check complete; look for any errors in the above output ^
-or in %BUILDDIR%/linkcheck/output.txt.
- goto end
-)
-
-if "%1" == "doctest" (
- %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
- if errorlevel 1 exit /b 1
- echo.
- echo.Testing of doctests in the sources finished, look at the ^
-results in %BUILDDIR%/doctest/output.txt.
- goto end
-)
-
-:end
diff --git a/docs/man/leap.1 b/docs/man/leap.1
deleted file mode 100644
index aef24d85..00000000
--- a/docs/man/leap.1
+++ /dev/null
@@ -1,83 +0,0 @@
-.\" Man page generated from reStructeredText.
-.
-.TH LEAP-CLIENT 1 "2013-01-30" "0.2" "General Commands Manual"
-.SH NAME
-leap-client \- graphical client to control LEAP, the encrypted internet access toolkit.
-.
-.nr rst2man-indent-level 0
-.
-.de1 rstReportMargin
-\\$1 \\n[an-margin]
-level \\n[rst2man-indent-level]
-level margin: \\n[rst2man-indent\\n[rst2man-indent-level]]
--
-\\n[rst2man-indent0]
-\\n[rst2man-indent1]
-\\n[rst2man-indent2]
-..
-.de1 INDENT
-.\" .rstReportMargin pre:
-. RS \\$1
-. nr rst2man-indent\\n[rst2man-indent-level] \\n[an-margin]
-. nr rst2man-indent-level +1
-.\" .rstReportMargin post:
-..
-.de UNINDENT
-. RE
-.\" indent \\n[an-margin]
-.\" old: \\n[rst2man-indent\\n[rst2man-indent-level]]
-.nr rst2man-indent-level -1
-.\" new: \\n[rst2man-indent\\n[rst2man-indent-level]]
-.in \\n[rst2man-indent\\n[rst2man-indent-level]]u
-..
-.SH SYNOPSIS
-.sp
-leap\-client [\-h] [\-d] [\-l [LOG FILE]] [\-\-openvpn\-verbosity [OPENVPN_VERB]]
-.SH DESCRIPTION
-.sp
-\fIleap\-client\fP is a graphical client to control LEAP, the encrypted internet access toolkit.
-.sp
-When launched, it places an icon in the system tray from where the LEAP services can be controlled.
-.SH OPTIONS
-.SS general options
-.sp
-\fB\-h, \-\-help\fP Print a help message and exit.
-.sp
-\fB\-d, \-\-debug\fP Launches client in debug mode, writing debug info to stdout.
-.sp
-\fB\-\-logfile=<file>\fP Writes log to file.
-.SS openvpn options
-.sp
-\fB\-\-openvpn\-verbosity\fP [0\-5] Verbosity level for openvpn logs.
-.SH WARNING
-.sp
-This software is still in early alpha testing. So don\(aqt trust your life to it!
-.sp
-At the current time, the LEAP Client is not compatible with \fBopenresolv\fP, but it works with \fBresolvconf\fP.
-.SH FILES
-.SS /etc/leap/resolv\-update
-.sp
-Post up/down script passed to openvpn. It writes /etc/resolv.conf to avoid dns leaks, and restores the original resolv.conf on exit.
-.SS /etc/leap/resolv\-head
-.SS /etc/leap/resolv\-tail
-.sp
-Custom entries that will appear in the written resolv.conf
-.SS /usr/share/polkit\-1/actions/net.openvpn.gui.leap.policy
-.sp
-PolicyKit policy file, used for granting access to openvpn without the need of entering a password each time.
-.SS ~/.config/leap/
-.sp
-Main config folder
-.SS ~/.config/leap/leap.conf
-.sp
-GUI options
-.SH BUGS
-.sp
-Please report any bugs to \fI\%https://leap.se/code\fP
-.SH AUTHOR
-LEAP Encryption Access Project https://leap.se
-.SH COPYRIGHT
-GPLv3+
-.\" Generated by docutils manpage writer.
-.\"
-.
diff --git a/docs/man/leap.1.rst b/docs/man/leap.1.rst
deleted file mode 100644
index 1ef5b3cc..00000000
--- a/docs/man/leap.1.rst
+++ /dev/null
@@ -1,86 +0,0 @@
-===========
-leap-client
-===========
-
-------------------------------------------------------------------------
-graphical client to control LEAP, the encrypted internet access toolkit.
-------------------------------------------------------------------------
-
-:Author: LEAP Encryption Access Project https://leap.se
-:Date: 2013-01-30
-:Copyright: GPLv3+
-:Version: 0.2
-:Manual section: 1
-:Manual group: General Commands Manual
-
-SYNOPSIS
-========
-
-leap-client [-h] [-d] [-l [LOG FILE]] [--openvpn-verbosity [OPENVPN_VERB]]
-
-DESCRIPTION
-===========
-
-*leap-client* is a graphical client to control LEAP, the encrypted internet access toolkit.
-
-When launched, it places an icon in the system tray from where the LEAP services can be controlled.
-
-
-OPTIONS
-=======
-
-general options
----------------
-
-**-h, --help** Print a help message and exit.
-
-**-d, --debug** Launches client in debug mode, writing debug info to stdout.
-
-**--logfile=<file>** Writes log to file.
-
-openvpn options
----------------
-
-**--openvpn-verbosity** [0-5] Verbosity level for openvpn logs.
-
-
-WARNING
-=======
-
-This software is still in early alpha testing. So don't trust your life to it!
-
-At the current time, the LEAP Client is not compatible with ``openresolv``, but it works with ``resolvconf``.
-
-FILES
-=====
-
-/etc/leap/resolv-update
------------------------
-Post up/down script passed to openvpn. It writes /etc/resolv.conf to avoid dns leaks, and restores the original resolv.conf on exit.
-
-/etc/leap/resolv-head
----------------------
-/etc/leap/resolv-tail
----------------------
-
-Custom entries that will appear in the written resolv.conf
-
-/usr/share/polkit-1/actions/net.openvpn.gui.leap.policy
--------------------------------------------------------
-
-PolicyKit policy file, used for granting access to openvpn without the need of entering a password each time.
-
-~/.config/leap/
----------------
-
-Main config folder
-
-~/.config/leap/leap.conf
-------------------------
-
-GUI options
-
-BUGS
-====
-
-Please report any bugs to https://leap.se/code
diff --git a/docs/pkg/debian.rst b/docs/pkg/debian.rst
deleted file mode 100644
index e98032a5..00000000
--- a/docs/pkg/debian.rst
+++ /dev/null
@@ -1,28 +0,0 @@
-.. _debian:
-
-Debian
-======
-
-This section documents all related to the debian package.
-
-
-Dependencies
-------------
-
-* ``openvpn``
-* ``python-qt4``
-* ``python-crypto``
-* ``python setuptools``
-* ``python-requests``
-* ``python-openssl``
-
-.. note::
- these need a version that is not found in the current debian stable or in ubuntu 12.04.
- They will be packaged... soon.
-
-* ``python-keyring``
-
-For tests
-^^^^^^^^^
-* ``python-nose``, ``python-mock``, ``python-coverage``
-
diff --git a/docs/pkg/osx.rst b/docs/pkg/osx.rst
deleted file mode 100644
index e69de29b..00000000
--- a/docs/pkg/osx.rst
+++ /dev/null
diff --git a/docs/pkg/win.rst b/docs/pkg/win.rst
deleted file mode 100644
index e69de29b..00000000
--- a/docs/pkg/win.rst
+++ /dev/null
diff --git a/docs/testers/howto.rst b/docs/testers/howto.rst
deleted file mode 100644
index c4a928ed..00000000
--- a/docs/testers/howto.rst
+++ /dev/null
@@ -1,111 +0,0 @@
-.. _testhowto:
-
-Howto for Testers
-=================
-
-This document covers a how-to guide to:
-
-#. Quickly fetching latest development code, and
-#. Reporting bugs.
-
-Let's go!
-
-.. _fetchinglatest:
-
-Fetching latest development code
----------------------------------
-
-To allow rapid testing on different platforms, we have put together a quick script that fetches the latest development code. It more or less does all the steps covered in the :ref:`Setting up a development environment <environment>` section, only in a more compact way suitable (ahem) also for non-developers.
-
-Install dependencies
-^^^^^^^^^^^^^^^^^^^^
-First, install all the base dependencies plus git, virtualenv and development files needed to compile several extensions::
-
- apt-get install openvpn git-core python-dev python-qt4 python-setuptools python-virtualenv
-
-
-Bootstrap script
-^^^^^^^^^^^^^^^^
-.. note::
-   This will fetch the *develop* branch. If you want to test another branch, just change it in the line starting with *pip install...*. Alternatively, bug kali so she adds a branch option to a decent script.
-
-.. note::
- This script could make use of the after_install hook. Read http://pypi.python.org/pypi/virtualenv/
-
-Download and source the following script in the parent folder where you want your testing build to be downloaded. For instance, to `/tmp/`:
-
-.. code-block:: bash
-
- cd /tmp
- wget https://raw.github.com/leapcode/leap_client/develop/pkg/scripts/leap_client_bootstrap.sh
- source leap_client_bootstrap.sh
-
-Tada! If everything went well, you should be able to run the client by typing::
-
- bin/leap-client
-
-Noticed that your prompt changed? That was *virtualenv*. Keep reading...
-
-Activating the virtualenv
-^^^^^^^^^^^^^^^^^^^^^^^^^
-The above bootstrap script has fetched the latest code inside a virtualenv, which is an isolated, *virtual* local Python environment that avoids messing with your global paths. You will notice you are *inside* a virtualenv because you will see a modified prompt reminding you of it (*leap-client-testbuild* in this case).
-
-Thus, if you forget to *activate your virtualenv*, the client will not run from the local path, and it will be looking for something else in your global path. So, **you have to remember to activate your virtualenv** each time that you open a new shell and want to execute the code you are testing. You can do this by typing::
-
- $ source bin/activate
-
-from the directory where you *sourced* the bootstrap script.
-
-Refer to :ref:`Working with virtualenv <virtualenv>` to learn more about virtualenv.
-
-Copying config files
-^^^^^^^^^^^^^^^^^^^^
-
-If you have never installed the ``leap-client`` globally, **you need to copy some files to their proper path before running it for the first time** (you only need to do this once). This, unlike the virtualenv-based operations, will need root permissions. See the :ref:`copy script files <copyscriptfiles>` and :ref:`running openvpn without root privileges <policykit>` sections for more info on this. In short::
-
- $ sudo cp pkg/linux/polkit/net.openvpn.gui.leap.policy /usr/share/polkit-1/actions/
- $ sudo mkdir -p /etc/leap
- $ sudo cp pkg/linux/resolv-update /etc/leap
-
-Local config files
-^^^^^^^^^^^^^^^^^^^
-
-If you want to start fresh without config files, just move them. In linux::
-
- mv ~/.config/leap ~/.config/leap.old
-
-Pulling latest changes
-^^^^^^^^^^^^^^^^^^^^^^
-
-You should be able to cd into the downloaded repo and pull latest changes::
-
- (leap-client-testbuild)$ cd src/leap-client
- (leap-client-testbuild)$ git pull origin develop
-
-However, as a tester you are encouraged to run the whole bootstrap process from time to time to help us catch install and versioning bugs too.
-
-Testing the packages
-^^^^^^^^^^^^^^^^^^^^
-When we have a release candidate for the supported platforms (Debian stable, Ubuntu 12.04 for now), we will also announce the URI where you can download the rc for testing on your system. Stay tuned!
-
-Testing the status of translations
-----------------------------------
-
-We need translators! You can go to `transifex <https://www.transifex.com/projects/p/leap-client/>`_, get an account and start contributing.
-
-If you want to check the current status of the client localization in a language other than the one set in your machine, you can do it with a simple trick (under linux). For instance, do::
-
-    $ LANG=es_ES leap-client
-
-for running the LEAP Client with the Spanish locales.
-
-Reporting bugs
---------------
-
-.. admonition:: Reporting better bugs
-
- There is a great text on the art of bug reporting, that can be found `online <http://www.chiark.greenend.org.uk/~sgtatham/bugs.html>`_.
-
-.. TODO add a line with ref. to running the client in debug mode...
-
-We use the `LEAP Client Bug Tracker <https://leap.se/code/projects/eip-client>`_, although you can also use `Github issues <https://github.com/leapcode/leap_client/issues>`_.
diff --git a/docs/user/gpl.png b/docs/user/gpl.png
deleted file mode 100644
index 3e9136e6..00000000
--- a/docs/user/gpl.png
+++ /dev/null
Binary files differ
diff --git a/docs/user/install.rst b/docs/user/install.rst
deleted file mode 100644
index 1f0fd831..00000000
--- a/docs/user/install.rst
+++ /dev/null
@@ -1,47 +0,0 @@
-.. _install:
-
-Installation
-============
-
-This part of the documentation covers the installation of the LEAP Client.
-We assume that you want to get it properly installed before being able to use it.
-
-Debian package
---------------
-
-.. warning::
-
- No updated debian package yet.
-
-Once we have a release candidate, probably the easiest way of having the LEAP Client installed will be to install a .deb package under debian or ubuntu systems.
-
-
-Distribute & Pip
-----------------
-
-.. warning::
-
- This does not work yet, since we have not released an initial version yet to the cheese shop.
-
-Installing LEAP Client will be as simple as using `pip <http://www.pip-installer.org/>`_ once we have a release candidate::
-
- $ pip install leap-client
-
-Get the code
-------------
-
-.. warning::
-
- This... won't work either, as-is. This should be the third optional way to install stable releases from master branch. Right now that does not work because there is *nothing* updated in the master branch. Leaving this here since this is what we will be doing, but if you really intend to have a working tree, refer to the sections :ref:`setting up a working environment <environment>` or :ref:`fetching latest code for testing <fetchinglatest>`.
-
-You can get the code from LEAP public git repository ::
-
- git clone git://leap.se/leap_client
-
-Or from the github mirror ::
-
- git clone git://github.com/leapcode/leap_client.git
-
-Once you have grabbed a copy of the sources, you can install it into your site-packages easily ::
-
- $ python setup.py install
diff --git a/docs/user/intro.rst b/docs/user/intro.rst
deleted file mode 100644
index abb6d487..00000000
--- a/docs/user/intro.rst
+++ /dev/null
@@ -1,101 +0,0 @@
-.. _introduction:
-
-Introduction
-============
-
-The LEAP Client
----------------
-.. if yoy change this, change it also in the index.rst
-The **LEAP Client** is a :ref:`GPL3 Licensed <gpl3>` multiplatform client, written in Python using PyQt4, that supports the features offered by :ref:`the LEAP Platform <leapplatform>`. It is currently being tested on Linux; support for OSX and Windows will come soon.
-
-Features
-^^^^^^^^
-
-The LEAP Client allows you to easily secure your communications.
-
-- Provider selection
-- User registration
-- Encrypted Internet Proxy support (autoconfigured service using openvpn).
-
-Coming soon
-^^^^^^^^^^^^
-
-- Encrypted email
-
-.. _leapplatform:
-
-The LEAP Platform
-^^^^^^^^^^^^^^^^^
-The LEAP Provider Platform is the server-side part of LEAP that is run by service providers. It consists of a set of complementary packages and recipes to automate the maintenance of LEAP services in a hardened GNU/Linux environment. Our goal is to make it painless for service providers and ISPs to deploy a secure communications platform.
-
-Read `more about the LEAP Platform <https://leap.se/en/technology/platform>`_ or `check out the code <https://github.com/leapcode/leap_platform>`_.
-
-
-.. _philosophy:
-
-Philosophy
-----------
-
-The Right to Whisper
-^^^^^^^^^^^^^^^^^^^^
-LEAP fights for *the right to whisper*.
-
-Like free speech, the right to whisper is a necessary precondition for **a free society**. Without it, civil society and political freedom become impossible. As the importance of digital communication for civic participation increases, so does the importance of the ability to digitally whisper.
-
-Unfortunately, advances in surveillance technology are rapidly eroding the ability to whisper. This is a worldwide problem, not simply an issue for people in repressive contexts. Acceptance of poor security in the West creates a global standard of insecure practice, even among civil society actors who urgently need the ability to communicate safely.
-
-The stakes could not be higher. Activists are dying because their communication technologies betray their identity, location, and conversations. When activists attempt to secure their communications, they face confusing software, a dearth of secure providers, and a greater risk of being flagged as potential troublemakers. In other words, problems of usability, availability, and adoption.
-
-Our vision
-^^^^^^^^^^
-The LEAP vision is to attack these problems of usability, availability, and adoption head on.
-
-To address **usability**:
- we are creating a complete system where the user-facing client software is
- tightly coupled with the cloud-based components of the system. All our software
- will be auto-configuring, will prevent users from practicing insecure behavior, and
- will primarily limit the configuration options to those moments when the user is
- placing their trust in another entity.
-
-To address **availability**:
- LEAP will work closely with service providers to adopt our open source, automated
- platform for running high-availability communication services. By lowering the
- barriers of entry to become a reliable provider, we can increase the supply and
- decrease the cost of secure communications.
-
-To address **adoption**:
- the LEAP platform layers higher security on top of existing protocols to allow
- users a gradual transition path and backward compatibility. Our goal is to create
- services that are attractive in terms of features, usability, and price for users in
- both democratic and repressive contexts.
-
-All contributions should keep these three points in mind.
-
-.. _`gpl3`:
-
-GPLv3 License
---------------
-
-.. image:: gpl.*
-
-The LEAP Client is released under the terms of the `GNU GPL version 3`_ or later.
-
-::
-
- The LEAP Client is free software: you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
- (at your option) any later version.
-
- The LEAP Client is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with the LEAP Client. If not, see http://www.gnu.org/licenses/.
-
-.. _`GNU GPL version 3`: http://www.gnu.org/licenses/gpl.txt
-
-.. ??? include whole version?
- .. include:: ../COPYING
diff --git a/docs/user/running.rst b/docs/user/running.rst
deleted file mode 100644
index 35ec1f6d..00000000
--- a/docs/user/running.rst
+++ /dev/null
@@ -1,40 +0,0 @@
-.. _running:
-
-Running
-==================
-
-This document covers how to launch the LEAP Client.
-
-Launching the client
---------------------
-After a successful installation, there should be a launcher called leap-client somewhere in your path::
-
- $ leap-client
-
-
-.. _debugmode:
-
-Debug mode
-----------
-If you are happy having lots of output in your terminal, you will be glad to know that you can run the client in debug mode::
-
- $ leap-client --debug
-
-If you ask for it, you can also have all that debug info in a beautiful file ready to be attached to your bug reports::
-
- $ leap-client --debug --logfile /tmp/leap.log
-
-.. warning::
- the following is broken since it will clutter your stdout with all the commands sent to the management interface.
- See bug #1232
-
-If you want to increase the level of verbosity passed to openvpn, you can do::
-
-
- $ leap-client --openvpn-verbosity 4
-
-Options
-------------
-To see all the available command line options::
-
- $ leap-client --help
diff --git a/openvpn/README b/openvpn/README
deleted file mode 100644
index bf2205c2..00000000
--- a/openvpn/README
+++ /dev/null
@@ -1,6 +0,0 @@
-OpenVPN binary, build scripts
-Works using a GCC minGW32 cross-compiler on Debian/Ubuntu
-Produces a working MS Windows executable
-openvpn.exe: PE32 executable (DLL) (console) Intel 80386, for MS Windows
-goes smooth for the 99%, might still need some slapping the flags around now and then
- -jrml
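-
-Example usage (a sketch; these are the targets handled by build.zsh below):
-
-  ./build.zsh win32   # cross-compile openvpn.exe with the mingw32 toolchain
-  ./build.zsh osx     # native build against PolarSSL
-  ./build.zsh clean   # remove the unpacked sources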
diff --git a/openvpn/Sources b/openvpn/Sources
deleted file mode 100644
index e2fe7bb3..00000000
--- a/openvpn/Sources
+++ /dev/null
@@ -1,4 +0,0 @@
-lzo -2.06 .tar.gz
-opensc -0.12.2 .tar.gz
-openssl -1.0.1c .tar.gz
-polarssl -1.1.4 .tgz
diff --git a/openvpn/build.zsh b/openvpn/build.zsh
deleted file mode 100755
index b36717c1..00000000
--- a/openvpn/build.zsh
+++ /dev/null
@@ -1,191 +0,0 @@
-#!/bin/zsh
-#
-# Copyright (C) 2012 Denis Roio <jaromil@dyne.org>
-#
-# This source code is free software; you can redistribute it and/or
-# modify it under the terms of the GNU Public License as published by
-# the Free Software Foundation; either version 3 of the License, or
-# (at your option) any later version.
-#
-# This source code is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-# Please refer to the GNU Public License for more details.
-#
-# You should have received a copy of the GNU Public License along with
-# this source code; if not, write to:
-# Free Software Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
-
-
-REPO="http://files.dyne.org/leap/openvpn/sources"
-TOPSRC=`pwd`
-QUIET=0
-DEBUG=0
-
-
-autoload colors; colors
-# standard output message routines
-# it's always useful to wrap them, in case we change behaviour later
-notice() { if [[ $QUIET == 0 ]]; then print "$fg_bold[green][*]$fg_no_bold[default] $1" >&2; fi }
-error() { if [[ $QUIET == 0 ]]; then print "$fg[red][!]$fg[default] $1" >&2; fi }
-func() { if [[ $DEBUG == 1 ]]; then print "$fg[blue][D]$fg[default] $1" >&2; fi }
-act() {
- if [[ $QUIET == 0 ]]; then
- if [ "$1" = "-n" ]; then
- print -n "$fg_bold[white] . $fg_no_bold[default] $2" >&2;
- else
- print "$fg_bold[white] . $fg_no_bold[default] $1" >&2;
- fi
- fi
-}
-
-{ test "$1" = "clean" } && {
- notice "Cleaning up all build in ${TOPSRC}"
- for src in `cat Sources | awk '
-/^#/ {next}
-/^./ { print $1 }'`; do
- { test "$src" != "" } && { rm -rf "${src}" }
- done
- act "Done."
- return 0
-}
-
-os="`uname -s`"
-target="$1"
-notice "OpenVPN build on $os for $target in ${TOPSRC}"
-
-prepare_sources() {
- notice "Preparing sources"
- # look for a file named "Sources", download and decompress entries
- # format of file: name version compression (complete filename when merged)
- { test -r Sources } || {
- error "Sources not found, nothing to build here"
- return 1
- }
- for src in `cat Sources | awk '
-/^#/ {next}
-/^./ { print $1 ";" $2 ";" $3 }'`; do
- name="${src[(ws:;:)1]}"
- ver="${src[(ws:;:)2]}"
- arch="${src[(ws:;:)3]}"
- file="${name}${ver}${arch}"
- func "preparing source for ${name}${ver}"
-
- { test "$1" != "" } && {
- test "$1" != "$name" } && {
- continue }
-
- # download the file
- { test -r ${file} } || {
- act "downloading ${file}"
- curl ${REPO}/${file} -o ${file}
- }
- # decompress the file
- { test -r ${name} } || {
- act "decompressing ${name}"
- case $arch in
- ## BARE SOURCE
- .tar.gz) tar xfz ${file}; mv ${name}${ver} ${name} ;;
- .tar.bz2) tar xfj ${file}; mv ${name}${ver} ${name} ;;
- .tgz) tar xfz ${file}; mv ${name}${ver} ${name} ;;
- *) error "compression not supported: $arch"
- esac
- }
- act "${name} source ready"
- done
-}
-
-act "Downloading sources"
-
-# git clone latest openvpn
-{ test -r openvpn } || { git clone https://github.com/OpenVPN/openvpn.git }
-
-case "$os" in
- Darwin)
- prepare_sources lzo
- prepare_sources polarssl
- ;;
- Linux) # Cross-compile for Win32
- prepare_sources lzo
- prepare_sources opensc
- prepare_sources openssl
- # tap windows
- { test -r tap-windows } || { git clone https://github.com/OpenVPN/tap-windows.git }
- ;;
-esac
-
-notice "Sources ready, now compiling..."
-LOG="`pwd`/build.log"; touch ${LOG}
-act "logs saved in build.log"
-
-case "$target" in
- osx)
- { test -r polarssl/library/libpolarssl.a } || {
- act "building PolarSSL..."
- pushd polarssl
- CC=clang cmake . >> ${LOG}
- make -C library clean
- cat CMakeCache.txt | awk '
-/^CMAKE_C_COMPILER/ { print "CMAKE_C_COMPILER:FILEPATH=/usr/bin/clang"; next }
-/^CMAKE_BUILD_TYPE/ { print $1 "Release"; next }
-/^CMAKE_C_FLAGS:STRING/ { print "CMAKE_C_FLAGS:STRING=-arch x86_64 -arch i386"; next }
-{ print $0 }
-' > CMakeCache.leap
- cp CMakeCache.leap CMakeCache.txt
- make -C library >> ${LOG}
- popd
- act "done."
- }
-
- act "building OpenVPN"
- pushd openvpn
- CC=clang CFLAGS="-arch x86_64 -arch i386" \
- LZO_LIBS="/opt/local/lib/liblzo2.a" LZO_CFLAGS="-I/opt/local/include" \
- POLARSSL_CFLAGS="-I${TOPSRC}/polarssl/include" \
- POLARSSL_LIBS="${TOPSRC}/polarssl/library/libpolarssl.a" \
- ./configure --with-crypto-library=polarssl >> ${LOG}
- make src/openvpn/openvpn
- popd
- act "done."
- ;;
-
- win32)
- { test -r lzo/src/liblzo2.la } || { pushd lzo
- act "building LZO lib"
- ./configure --host=i586-mingw32msvc >> ${LOG}
- make >> ${LOG}; popd }
- # openssl
- { test -r openssl/libssl.a } || {
- act "building OpenSSL lib"
- pushd openssl
- ./Configure --cross-compile-prefix=i586-mingw32msvc- mingw >> ${LOG}
- make >> ${LOG}; popd }
-
- pushd openvpn
- act "building latest OpenVPN"
- { test -r configure } || {
- sed -i -e 's/-municode//' src/openvpn/Makefile.am
- autoreconf -i >> ${LOG}
- }
- CFLAGS="-I/usr/i586-mingw32msvc/include/ddk -D_WIN32_WINNT=0x0501" \
- LZO_LIBS="${TOPSRC}/lzo/src/liblzo2.la" \
- LZO_CFLAGS="-I${TOPSRC}/lzo/include" \
- TAP_CFLAGS="-I${TOPSRC}/tap-windows/src" \
- OPENSSL_SSL_CFLAGS="-I${TOPSRC}/openssl/include" \
- OPENSSL_CRYPTO_CFLAGS="-I${TOPSRC}/openssl/crypto" \
- OPENSSL_SSL_LIBS="${TOPSRC}/openssl/libssl.a" \
- OPENSSL_CRYPTO_LIBS="${TOPSRC}/openssl/libcrypto.a" \
- ./configure --host=i586-mingw32msvc >> ${LOG}
- make >> ${LOG}
- popd
-
- act "If OpenVPN build reports a final error on linkage, it might be due to a libtool bug"
- act "(something like undefined reference to _WinMain@16)"
- act "You need to go inside openvpn/src/openvpn and issue the last compile line manually"
- act "adding an flat '-shared' at the end of it, then do 'cp .libs/openvpn.exe .'"
- act "Happy hacking."
- ;;
- *)
- error "Unknown target: $target"
- ;;
-esac
diff --git a/pkg/__init__.py b/pkg/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/pkg/__init__.py
+++ /dev/null
diff --git a/pkg/branding/__init__.py b/pkg/branding/__init__.py
deleted file mode 100644
index 0bd6befb..00000000
--- a/pkg/branding/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-from .config import APP_BASE_NAME, APP_PREFIX, BRANDED_BUILD, BRANDED_OPTS
-
-
-def get_name():
- if BRANDED_BUILD is True:
- return APP_PREFIX + BRANDED_OPTS.get('short_name', 'name_unknown')
- else:
- return APP_BASE_NAME
-
-
-def get_shortname():
- if BRANDED_BUILD is True:
- return BRANDED_OPTS.get('short_name', 'name_unknown')
-
-__all__ = ['get_name']
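-
-# Usage sketch (not part of the original module); with the defaults in
-# pkg/branding/config.py, BRANDED_BUILD is False and get_name() returns
-# the unbranded base name:
-#
-#   >>> from pkg.branding import get_name
-#   >>> get_name()
-#   'leap-client'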
diff --git a/pkg/branding/config.py b/pkg/branding/config.py
deleted file mode 100644
index bcacc3bc..00000000
--- a/pkg/branding/config.py
+++ /dev/null
@@ -1,11 +0,0 @@
-# Configuration file for branding
-
-BRANDED_BUILD = False
-
-APP_BASE_NAME = "leap-client"
-APP_PREFIX = "%s-" % APP_BASE_NAME
-
-BRANDED_OPTS = {
- 'short_name': "",
- 'provider_domain': "",
- 'provider_ca_path': ""}
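-
-# A hypothetical branded build would flip BRANDED_BUILD and fill in
-# BRANDED_OPTS; the values below are illustrative only:
-#
-# BRANDED_BUILD = True
-# BRANDED_OPTS = {
-#     'short_name': "exampleprovider",
-#     'provider_domain': "example.org",
-#     'provider_ca_path': "pkg/branding/cacert.pem"}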
diff --git a/pkg/dev-reqs.pip b/pkg/dev-reqs.pip
deleted file mode 100644
index 44799a26..00000000
--- a/pkg/dev-reqs.pip
+++ /dev/null
@@ -1,4 +0,0 @@
-ipython
-ipdb
-pdb4qt
-pygeoip
diff --git a/pkg/distribute_setup.py b/pkg/distribute_setup.py
deleted file mode 100644
index 8f5b0637..00000000
--- a/pkg/distribute_setup.py
+++ /dev/null
@@ -1,515 +0,0 @@
-#!python
-"""Bootstrap distribute installation
-
-If you want to use setuptools in your package's setup.py, just include this
-file in the same directory with it, and add this to the top of your setup.py::
-
- from distribute_setup import use_setuptools
- use_setuptools()
-
-If you want to require a specific version of setuptools, set a download
-mirror, or use an alternate download directory, you can do so by supplying
-the appropriate options to ``use_setuptools()``.
-
-This file can also be run as a script to install or upgrade setuptools.
-"""
-import os
-import sys
-import time
-import fnmatch
-import tempfile
-import tarfile
-from distutils import log
-
-try:
- from site import USER_SITE
-except ImportError:
- USER_SITE = None
-
-try:
- import subprocess
-
- def _python_cmd(*args):
- args = (sys.executable,) + args
- return subprocess.call(args) == 0
-
-except ImportError:
- # will be used for python 2.3
- def _python_cmd(*args):
- args = (sys.executable,) + args
- # quoting arguments if windows
- if sys.platform == 'win32':
- def quote(arg):
- if ' ' in arg:
- return '"%s"' % arg
- return arg
- args = [quote(arg) for arg in args]
- return os.spawnl(os.P_WAIT, sys.executable, *args) == 0
-
-DEFAULT_VERSION = "0.6.28"
-DEFAULT_URL = "http://pypi.python.org/packages/source/d/distribute/"
-SETUPTOOLS_FAKED_VERSION = "0.6c11"
-
-SETUPTOOLS_PKG_INFO = """\
-Metadata-Version: 1.0
-Name: setuptools
-Version: %s
-Summary: xxxx
-Home-page: xxx
-Author: xxx
-Author-email: xxx
-License: xxx
-Description: xxx
-""" % SETUPTOOLS_FAKED_VERSION
-
-
-def _install(tarball, install_args=()):
- # extracting the tarball
- tmpdir = tempfile.mkdtemp()
- log.warn('Extracting in %s', tmpdir)
- old_wd = os.getcwd()
- try:
- os.chdir(tmpdir)
- tar = tarfile.open(tarball)
- _extractall(tar)
- tar.close()
-
- # going in the directory
- subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
- os.chdir(subdir)
- log.warn('Now working in %s', subdir)
-
- # installing
- log.warn('Installing Distribute')
- if not _python_cmd('setup.py', 'install', *install_args):
- log.warn('Something went wrong during the installation.')
- log.warn('See the error message above.')
- finally:
- os.chdir(old_wd)
-
-
-def _build_egg(egg, tarball, to_dir):
- # extracting the tarball
- tmpdir = tempfile.mkdtemp()
- log.warn('Extracting in %s', tmpdir)
- old_wd = os.getcwd()
- try:
- os.chdir(tmpdir)
- tar = tarfile.open(tarball)
- _extractall(tar)
- tar.close()
-
- # going in the directory
- subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
- os.chdir(subdir)
- log.warn('Now working in %s', subdir)
-
- # building an egg
- log.warn('Building a Distribute egg in %s', to_dir)
- _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir)
-
- finally:
- os.chdir(old_wd)
- # returning the result
- log.warn(egg)
- if not os.path.exists(egg):
- raise IOError('Could not build the egg.')
-
-
-def _do_download(version, download_base, to_dir, download_delay):
- egg = os.path.join(to_dir, 'distribute-%s-py%d.%d.egg'
- % (version, sys.version_info[0], sys.version_info[1]))
- if not os.path.exists(egg):
- tarball = download_setuptools(version, download_base,
- to_dir, download_delay)
- _build_egg(egg, tarball, to_dir)
- sys.path.insert(0, egg)
- import setuptools
- setuptools.bootstrap_install_from = egg
-
-
-def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
- to_dir=os.curdir, download_delay=15, no_fake=True):
- # making sure we use the absolute path
- to_dir = os.path.abspath(to_dir)
- was_imported = 'pkg_resources' in sys.modules or \
- 'setuptools' in sys.modules
- try:
- try:
- import pkg_resources
- if not hasattr(pkg_resources, '_distribute'):
- if not no_fake:
- _fake_setuptools()
- raise ImportError
- except ImportError:
- return _do_download(version, download_base, to_dir, download_delay)
- try:
- pkg_resources.require("distribute>=" + version)
- return
- except pkg_resources.VersionConflict:
- e = sys.exc_info()[1]
- if was_imported:
- sys.stderr.write(
- "The required version of distribute (>=%s) is not available,\n"
- "and can't be installed while this script is running. Please\n"
- "install a more recent version first, using\n"
- "'easy_install -U distribute'."
- "\n\n(Currently using %r)\n" % (version, e.args[0]))
- sys.exit(2)
- else:
- del pkg_resources, sys.modules['pkg_resources'] # reload ok
- return _do_download(version, download_base, to_dir,
- download_delay)
- except pkg_resources.DistributionNotFound:
- return _do_download(version, download_base, to_dir,
- download_delay)
- finally:
- if not no_fake:
- _create_fake_setuptools_pkg_info(to_dir)
-
-
-def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
- to_dir=os.curdir, delay=15):
- """Download distribute from a specified location and return its filename
-
- `version` should be a valid distribute version number that is available
- as an egg for download under the `download_base` URL (which should end
- with a '/'). `to_dir` is the directory where the egg will be downloaded.
- `delay` is the number of seconds to pause before an actual download
- attempt.
- """
- # making sure we use the absolute path
- to_dir = os.path.abspath(to_dir)
- try:
- from urllib.request import urlopen
- except ImportError:
- from urllib2 import urlopen
- tgz_name = "distribute-%s.tar.gz" % version
- url = download_base + tgz_name
- saveto = os.path.join(to_dir, tgz_name)
- src = dst = None
- if not os.path.exists(saveto): # Avoid repeated downloads
- try:
- log.warn("Downloading %s", url)
- src = urlopen(url)
- # Read/write all in one block, so we don't create a corrupt file
- # if the download is interrupted.
- data = src.read()
- dst = open(saveto, "wb")
- dst.write(data)
- finally:
- if src:
- src.close()
- if dst:
- dst.close()
- return os.path.realpath(saveto)
-
-
-def _no_sandbox(function):
- def __no_sandbox(*args, **kw):
- try:
- from setuptools.sandbox import DirectorySandbox
- if not hasattr(DirectorySandbox, '_old'):
- def violation(*args):
- pass
- DirectorySandbox._old = DirectorySandbox._violation
- DirectorySandbox._violation = violation
- patched = True
- else:
- patched = False
- except ImportError:
- patched = False
-
- try:
- return function(*args, **kw)
- finally:
- if patched:
- DirectorySandbox._violation = DirectorySandbox._old
- del DirectorySandbox._old
-
- return __no_sandbox
-
-
-def _patch_file(path, content):
- """Will backup the file then patch it"""
- existing_content = open(path).read()
- if existing_content == content:
- # already patched
- log.warn('Already patched.')
- return False
- log.warn('Patching...')
- _rename_path(path)
- f = open(path, 'w')
- try:
- f.write(content)
- finally:
- f.close()
- return True
-
-_patch_file = _no_sandbox(_patch_file)
-
-
-def _same_content(path, content):
- return open(path).read() == content
-
-
-def _rename_path(path):
- new_name = path + '.OLD.%s' % time.time()
- log.warn('Renaming %s into %s', path, new_name)
- os.rename(path, new_name)
- return new_name
-
-
-def _remove_flat_installation(placeholder):
- if not os.path.isdir(placeholder):
-        log.warn('Unknown installation at %s', placeholder)
- return False
- found = False
- for file in os.listdir(placeholder):
- if fnmatch.fnmatch(file, 'setuptools*.egg-info'):
- found = True
- break
- if not found:
- log.warn('Could not locate setuptools*.egg-info')
- return
-
- log.warn('Removing elements out of the way...')
- pkg_info = os.path.join(placeholder, file)
- if os.path.isdir(pkg_info):
- patched = _patch_egg_dir(pkg_info)
- else:
- patched = _patch_file(pkg_info, SETUPTOOLS_PKG_INFO)
-
- if not patched:
- log.warn('%s already patched.', pkg_info)
- return False
- # now let's move the files out of the way
- for element in ('setuptools', 'pkg_resources.py', 'site.py'):
- element = os.path.join(placeholder, element)
- if os.path.exists(element):
- _rename_path(element)
- else:
- log.warn('Could not find the %s element of the '
- 'Setuptools distribution', element)
- return True
-
-_remove_flat_installation = _no_sandbox(_remove_flat_installation)
-
-
-def _after_install(dist):
- log.warn('After install bootstrap.')
- placeholder = dist.get_command_obj('install').install_purelib
- _create_fake_setuptools_pkg_info(placeholder)
-
-
-def _create_fake_setuptools_pkg_info(placeholder):
- if not placeholder or not os.path.exists(placeholder):
- log.warn('Could not find the install location')
- return
- pyver = '%s.%s' % (sys.version_info[0], sys.version_info[1])
- setuptools_file = 'setuptools-%s-py%s.egg-info' % \
- (SETUPTOOLS_FAKED_VERSION, pyver)
- pkg_info = os.path.join(placeholder, setuptools_file)
- if os.path.exists(pkg_info):
- log.warn('%s already exists', pkg_info)
- return
-
- if not os.access(pkg_info, os.W_OK):
- log.warn("Don't have permissions to write %s, skipping", pkg_info)
-
- log.warn('Creating %s', pkg_info)
- f = open(pkg_info, 'w')
- try:
- f.write(SETUPTOOLS_PKG_INFO)
- finally:
- f.close()
-
- pth_file = os.path.join(placeholder, 'setuptools.pth')
- log.warn('Creating %s', pth_file)
- f = open(pth_file, 'w')
- try:
- f.write(os.path.join(os.curdir, setuptools_file))
- finally:
- f.close()
-
-_create_fake_setuptools_pkg_info = _no_sandbox(
- _create_fake_setuptools_pkg_info
-)
-
-
-def _patch_egg_dir(path):
- # let's check if it's already patched
- pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
- if os.path.exists(pkg_info):
- if _same_content(pkg_info, SETUPTOOLS_PKG_INFO):
- log.warn('%s already patched.', pkg_info)
- return False
- _rename_path(path)
- os.mkdir(path)
- os.mkdir(os.path.join(path, 'EGG-INFO'))
- pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
- f = open(pkg_info, 'w')
- try:
- f.write(SETUPTOOLS_PKG_INFO)
- finally:
- f.close()
- return True
-
-_patch_egg_dir = _no_sandbox(_patch_egg_dir)
-
-
-def _before_install():
- log.warn('Before install bootstrap.')
- _fake_setuptools()
-
-
-def _under_prefix(location):
- if 'install' not in sys.argv:
- return True
- args = sys.argv[sys.argv.index('install') + 1:]
- for index, arg in enumerate(args):
- for option in ('--root', '--prefix'):
- if arg.startswith('%s=' % option):
- top_dir = arg.split('root=')[-1]
- return location.startswith(top_dir)
- elif arg == option:
- if len(args) > index:
- top_dir = args[index + 1]
- return location.startswith(top_dir)
- if arg == '--user' and USER_SITE is not None:
- return location.startswith(USER_SITE)
- return True
-
-
-def _fake_setuptools():
- log.warn('Scanning installed packages')
- try:
- import pkg_resources
- except ImportError:
- # we're cool
- log.warn('Setuptools or Distribute does not seem to be installed.')
- return
- ws = pkg_resources.working_set
- try:
- setuptools_dist = ws.find(
- pkg_resources.Requirement.parse('setuptools', replacement=False)
- )
- except TypeError:
- # old distribute API
- setuptools_dist = ws.find(
- pkg_resources.Requirement.parse('setuptools')
- )
-
- if setuptools_dist is None:
- log.warn('No setuptools distribution found')
- return
- # detecting if it was already faked
- setuptools_location = setuptools_dist.location
- log.warn('Setuptools installation detected at %s', setuptools_location)
-
-    # if --root or --prefix was provided, and if
- # setuptools is not located in them, we don't patch it
- if not _under_prefix(setuptools_location):
- log.warn('Not patching, --root or --prefix is installing Distribute'
- ' in another location')
- return
-
- # let's see if its an egg
- if not setuptools_location.endswith('.egg'):
- log.warn('Non-egg installation')
- res = _remove_flat_installation(setuptools_location)
- if not res:
- return
- else:
- log.warn('Egg installation')
- pkg_info = os.path.join(setuptools_location, 'EGG-INFO', 'PKG-INFO')
- if (os.path.exists(pkg_info) and
- _same_content(pkg_info, SETUPTOOLS_PKG_INFO)):
- log.warn('Already patched.')
- return
- log.warn('Patching...')
- # let's create a fake egg replacing setuptools one
- res = _patch_egg_dir(setuptools_location)
- if not res:
- return
-    log.warn('Patching done.')
- _relaunch()
-
-
-def _relaunch():
- log.warn('Relaunching...')
- # we have to relaunch the process
- # pip marker to avoid a relaunch bug
- _cmd = ['-c', 'install', '--single-version-externally-managed']
- if sys.argv[:3] == _cmd:
- sys.argv[0] = 'setup.py'
- args = [sys.executable] + sys.argv
- sys.exit(subprocess.call(args))
-
-
-def _extractall(self, path=".", members=None):
- """Extract all members from the archive to the current working
- directory and set owner, modification time and permissions on
- directories afterwards. `path' specifies a different directory
- to extract to. `members' is optional and must be a subset of the
- list returned by getmembers().
- """
- import copy
- import operator
- from tarfile import ExtractError
- directories = []
-
- if members is None:
- members = self
-
- for tarinfo in members:
- if tarinfo.isdir():
- # Extract directories with a safe mode.
- directories.append(tarinfo)
- tarinfo = copy.copy(tarinfo)
- tarinfo.mode = 448 # decimal for oct 0700
- self.extract(tarinfo, path)
-
- # Reverse sort directories.
- if sys.version_info < (2, 4):
- def sorter(dir1, dir2):
- return cmp(dir1.name, dir2.name)
- directories.sort(sorter)
- directories.reverse()
- else:
- directories.sort(key=operator.attrgetter('name'), reverse=True)
-
- # Set correct owner, mtime and filemode on directories.
- for tarinfo in directories:
- dirpath = os.path.join(path, tarinfo.name)
- try:
- self.chown(tarinfo, dirpath)
- self.utime(tarinfo, dirpath)
- self.chmod(tarinfo, dirpath)
- except ExtractError:
- e = sys.exc_info()[1]
- if self.errorlevel > 1:
- raise
- else:
- self._dbg(1, "tarfile: %s" % e)
-
-
-def _build_install_args(argv):
- install_args = []
- user_install = '--user' in argv
- if user_install and sys.version_info < (2, 6):
- log.warn("--user requires Python 2.6 or later")
- raise SystemExit(1)
- if user_install:
- install_args.append('--user')
- return install_args
-
-
-def main(argv, version=DEFAULT_VERSION):
- """Install or upgrade setuptools and EasyInstall"""
- tarball = download_setuptools()
- _install(tarball, _build_install_args(argv))
-
-
-if __name__ == '__main__':
- main(sys.argv[1:])
diff --git a/pkg/install_pyqt.sh b/pkg/install_pyqt.sh
deleted file mode 100755
index d6739816..00000000
--- a/pkg/install_pyqt.sh
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/bin/sh
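-# Note: pip cannot install sip/PyQt directly (they ship a configure.py
-# instead of a setup.py), but the failed run is expected to leave the
-# unpacked sources under build/, so we finish the build by hand from there.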
-pip install sip # fails
-cd build/sip
-python configure.py
-make && make install
-cd ../..
-pip install PyQt # fails
-cd build/PyQt
-python configure.py
-make && make install
diff --git a/pkg/install_venv.py b/pkg/install_venv.py
deleted file mode 100644
index 17dfb984..00000000
--- a/pkg/install_venv.py
+++ /dev/null
@@ -1,247 +0,0 @@
-# vim: tabstop=4 shiftwidth=4 softtabstop=4
-
-# Copyright 2010 United States Government as represented by the
-# Administrator of the National Aeronautics and Space Administration.
-# All Rights Reserved.
-#
-# Copyright 2010 OpenStack, LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License"); you may
-# not use this file except in compliance with the License. You may obtain
-# a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-
-"""
-Installation script for the LEAP Client's development virtualenv (adapted from OpenStack Nova's)
-"""
-
-import optparse
-import os
-import subprocess
-import sys
-
-ROOT = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
-VENV = os.path.join(ROOT, '.venv')
-PIP_REQUIRES = os.path.join(ROOT, 'pkg', 'requirements.pip')
-TEST_REQUIRES = os.path.join(ROOT, 'pkg', 'test-requirements.pip')
-PY_VERSION = "python%s.%s" % (sys.version_info[0], sys.version_info[1])
-
-
-def die(message, *args):
- print >> sys.stderr, message % args
- sys.exit(1)
-
-
-def check_python_version():
- if sys.version_info < (2, 6):
- die("Need Python Version >= 2.6")
-
-
-def run_command_with_code(cmd, redirect_output=True, check_exit_code=True):
- """
- Runs a command in an out-of-process shell, returning the
- output of that command. Working directory is ROOT.
- """
- if redirect_output:
- stdout = subprocess.PIPE
- else:
- stdout = None
-
-    print 'executing command: %s' % cmd
- proc = subprocess.Popen(cmd, cwd=ROOT, stdout=stdout)
- output = proc.communicate()[0]
- if check_exit_code and proc.returncode != 0:
- die('Command "%s" failed.\n%s', ' '.join(cmd), output)
- return (output, proc.returncode)
-
-
-def run_command(cmd, redirect_output=True, check_exit_code=True):
- return run_command_with_code(cmd, redirect_output, check_exit_code)[0]
-
-
-class Distro(object):
-
- def check_cmd(self, cmd):
- return bool(run_command(['which', cmd], check_exit_code=False).strip())
-
- def install_virtualenv(self):
- if self.check_cmd('virtualenv'):
- return
-
- if self.check_cmd('easy_install'):
- print 'Installing virtualenv via easy_install...',
- if run_command(['easy_install', 'virtualenv']):
- print 'Succeeded'
- return
- else:
- print 'Failed'
-
- die('ERROR: virtualenv not found.\n\nDevelopment'
- ' requires virtualenv, please install it using your'
- ' favorite package management tool')
-
- def post_process(self):
- """Any distribution-specific post-processing gets done here.
-
- In particular, this is useful for applying patches to code inside
- the venv."""
- pass
-
-
-class Debian(Distro):
- """This covers all Debian-based distributions."""
-
- def check_pkg(self, pkg):
- return run_command_with_code(['dpkg', '-l', pkg],
- check_exit_code=False)[1] == 0
-
- def apt_install(self, pkg, **kwargs):
- run_command(['sudo', 'apt-get', 'install', '-y', pkg], **kwargs)
-
- def apply_patch(self, originalfile, patchfile):
- run_command(['patch', originalfile, patchfile])
-
- def post_process(self):
- #symlink qt in virtualenv
- run_command(['pkg/tools/with_venv.sh', 'pkg/postmkvenv.sh'])
-
- def install_virtualenv(self):
- if self.check_cmd('virtualenv'):
- return
-
- if not self.check_pkg('python-virtualenv'):
- self.apt_install('python-virtualenv', check_exit_code=False)
-
- super(Debian, self).install_virtualenv()
-
-
-class Fedora(Distro):
- """This covers all Fedora-based distributions.
-
- Includes: Fedora, RHEL, CentOS, Scientific Linux"""
-
- def check_pkg(self, pkg):
- return run_command_with_code(['rpm', '-q', pkg],
- check_exit_code=False)[1] == 0
-
- def yum_install(self, pkg, **kwargs):
- run_command(['sudo', 'yum', 'install', '-y', pkg], **kwargs)
-
- def apply_patch(self, originalfile, patchfile):
- run_command(['patch', originalfile, patchfile])
-
- def install_virtualenv(self):
- if self.check_cmd('virtualenv'):
- return
-
- if not self.check_pkg('python-virtualenv'):
- self.yum_install('python-virtualenv', check_exit_code=False)
-
- super(Fedora, self).install_virtualenv()
-
-
-def get_distro():
- if os.path.exists('/etc/fedora-release') or \
- os.path.exists('/etc/redhat-release'):
- return Fedora()
- elif os.path.exists('/etc/debian_version'):
- return Debian()
- else:
- return Distro()
-
-
-def check_dependencies():
- get_distro().install_virtualenv()
-
-
-def create_virtualenv(venv=VENV, no_site_packages=True):
- """Creates the virtual environment and installs PIP only into the
- virtual environment
- """
- print 'Creating venv...',
- if no_site_packages:
- #setuptools and virtualenv don't play nicely together,
- #so we create the virtualenv with the distribute package instead.
- #See: view-source:http://pypi.python.org/pypi/distribute
- run_command(['virtualenv', '-q', '--distribute', '--no-site-packages', VENV])
- else:
- run_command(['virtualenv', '-q', '--distribute', VENV])
- print 'done.'
- print 'Installing pip in virtualenv...',
- if not run_command(['pkg/tools/with_venv.sh', 'easy_install',
- 'pip>1.0']).strip():
- die("Failed to install pip.")
- print 'done.'
-
-
-def pip_install(*args):
- run_command(['pkg/tools/with_venv.sh',
- 'pip', 'install', '--upgrade'] + list(args),
- redirect_output=False)
-
-
-def install_dependencies(venv=VENV):
- print 'Installing dependencies with pip (this can take a while)...'
-
- # First things first, make sure our venv has the latest pip and distribute.
- pip_install('pip')
- pip_install('distribute')
-
- pip_install('-r', PIP_REQUIRES)
- pip_install('-r', TEST_REQUIRES)
-
- # "
- pthfile = os.path.join(venv, "lib", PY_VERSION, "site-packages",
- "leap-client.pth")
- f = open(pthfile, 'w')
- f.write("%s\n" % ROOT)
-
-
-def post_process():
- get_distro().post_process()
-
-
-def print_help():
- help = """
- To activate the leap virtualenv for the extent of your current
- shell session you can run:
-
- $ source .venv/bin/activate
-
- Or, if you prefer, you can run commands in the virtualenv on a case by case
- basis by running:
-
- $ pkg/tools/with_venv.sh <your command>
-
- Also, make test will automatically use the virtualenv.
- """
- print help
-
-
-def parse_args():
- """Parse command-line arguments"""
- parser = optparse.OptionParser()
- parser.add_option("-n", "--no-site-packages", dest="no_site_packages",
- default=False, action="store_true",
- help="Do not inherit packages from global Python install")
- return parser.parse_args()
-
-
-def main(argv):
- (options, args) = parse_args()
- check_python_version()
- check_dependencies()
- create_virtualenv(no_site_packages=options.no_site_packages)
- install_dependencies()
- post_process()
- print_help()
-
-if __name__ == '__main__':
- main(sys.argv)
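-
-# Typical invocation (a sketch), run from the repository root:
-#
-#   python pkg/install_venv.py --no-site-packages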
diff --git a/pkg/linux/README b/pkg/linux/README
deleted file mode 100644
index 7410789b..00000000
--- a/pkg/linux/README
+++ /dev/null
@@ -1,4 +0,0 @@
-= Files =
-In GNU/Linux, we expect these files to be in place:
-
-resolv-update -> /etc/leap/resolv-update
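-
-= Example =
-A sketch of how the script is meant to be wired into an OpenVPN
-configuration (these are standard OpenVPN options):
-
-  script-security 2
-  up /etc/leap/resolv-update
-  down /etc/leap/resolv-update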
diff --git a/pkg/linux/leap.desktop b/pkg/linux/leap.desktop
deleted file mode 100644
index 7a6d39d9..00000000
--- a/pkg/linux/leap.desktop
+++ /dev/null
@@ -1,13 +0,0 @@
-[Desktop Entry]
-Version=0.1.0
-Encoding=UTF-8
-Name=EIP
-Comment=Anonymity and privacy
-Comment[en]=Anonymity and privacy
-Comment[es]=Anonimato y privacidad
-Comment[sv]=Anonymitet och avlyssningsskydd
-Exec=leap
-Terminal=false
-Type=Application
-Icon=leap.png
-Categories=Network;
diff --git a/pkg/linux/polkit/net.openvpn.gui.leap.policy b/pkg/linux/polkit/net.openvpn.gui.leap.policy
deleted file mode 100644
index 50f991a3..00000000
--- a/pkg/linux/polkit/net.openvpn.gui.leap.policy
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE policyconfig PUBLIC
- "-//freedesktop//DTD PolicyKit Policy Configuration 1.0//EN"
- "http://www.freedesktop.org/standards/PolicyKit/1/policyconfig.dtd">
-<policyconfig>
-
- <vendor>LEAP Project</vendor>
- <vendor_url>http://leap.se/</vendor_url>
-
- <action id="net.openvpn,gui.leap.run-openvpn">
- <description>Runs the openvpn binary</description>
- <description xml:lang="es">Ejecuta el binario openvpn</description>
- <message>OpenVPN needs you to authenticate to start</message>
- <message xml:lang="es">OpenVPN necesita autorizacion para comenzar</message>
- <icon_name>package-x-generic</icon_name>
- <defaults>
- <allow_any>yes</allow_any>
- <allow_inactive>yes</allow_inactive>
- <allow_active>yes</allow_active>
- </defaults>
- <annotate key="org.freedesktop.policykit.exec.path">/usr/sbin/openvpn</annotate>
- </action>
-</policyconfig>
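-<!-- Sketch of the expected invocation through PolicyKit: pkexec selects the
-     action above by matching the org.freedesktop.policykit.exec.path
-     annotation against the program being run, e.g.
-       pkexec /usr/sbin/openvpn --config /path/to/eip.ovpn
-     (the config path is illustrative only). -->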
diff --git a/pkg/linux/resolv-update b/pkg/linux/resolv-update
deleted file mode 100755
index a54802e3..00000000
--- a/pkg/linux/resolv-update
+++ /dev/null
@@ -1,90 +0,0 @@
-#!/bin/bash
-#
-# Parses options from openvpn to update resolv.conf
-#
-# The only way to enforce that a linux system will not leak DNS
-# queries is to replace /etc/resolv.conf with a file that only
-# has the DNS resolver specified by the VPN.
-#
-# That is what this script does. This is what resolvconf is for,
-# but sadly it does not always work.
-#
-# Example envs set from openvpn:
-# foreign_option_1='dhcp-option DNS 193.43.27.132'
-# foreign_option_2='dhcp-option DNS 193.43.27.133'
-# foreign_option_3='dhcp-option DOMAIN be.bnc.ch'
-#
-
-function up() {
-
- comment=$(
-cat <<SETVAR
-#
-# This is a temporary resolv.conf set by the LEAP Client in order to
-# strictly enforce that DNS lookups are secured by the VPN.
-#
-# When the LEAP Client quits or the VPN connection it manages is dropped,
-# this file will be replaced with the regularly scheduled /etc/resolv.conf
-#
-# If you want custom entries to appear in this file while LEAP is running,
-# put them in /etc/leap/resolv-head or /etc/leap/resolv-tail. These files
-# should only be writable by root.
-#
-
-SETVAR
-)
-
- if [ -f /etc/leap/resolv-head ] ; then
- custom_head=$(cat /etc/leap/resolv-head)
- else
- custom_head=""
- fi
-
- if [ -f /etc/leap/resolv-tail ] ; then
- custom_tail=$(cat /etc/leap/resolv-tail)
- else
- custom_tail=""
- fi
-
- for optionname in ${!foreign_option_*} ; do
- option="${!optionname}"
- echo $option
- part1=$(echo "$option" | cut -d " " -f 1)
- if [ "$part1" == "dhcp-option" ] ; then
- part2=$(echo "$option" | cut -d " " -f 2)
- part3=$(echo "$option" | cut -d " " -f 3)
- if [ "$part2" == "DNS" ] ; then
- IF_DNS_NAMESERVERS="$IF_DNS_NAMESERVERS $part3"
- fi
- if [ "$part2" == "DOMAIN" ] ; then
- IF_DNS_SEARCH="$IF_DNS_SEARCH $part3"
- fi
- fi
- done
- R=""
- for SS in $IF_DNS_SEARCH ; do
- R="${R}search $SS
-"
- done
- for NS in $IF_DNS_NAMESERVERS ; do
- R="${R}nameserver $NS
-"
- done
- mv /etc/resolv.conf /etc/resolv.conf.bak
- echo "$comment
-$custom_head
-$R
-$custom_tail" > /etc/resolv.conf
-}
-
-function down() {
- if [ -f /etc/resolv.conf.bak ] ; then
- unlink /etc/resolv.conf
- mv /etc/resolv.conf.bak /etc/resolv.conf
- fi
-}
-
-case $script_type in
- up) up ;;
- down) down ;;
-esac
diff --git a/pkg/osx/Info.plist b/pkg/osx/Info.plist
deleted file mode 100644
index e90d920a..00000000
--- a/pkg/osx/Info.plist
+++ /dev/null
@@ -1,22 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
- <key>CFBundleDisplayName</key>
- <string>leap-client</string>
- <key>CFBundleExecutable</key>
- <string>MacOS/app</string>
- <key>CFBundleIconFile</key>
- <string>icon-windowed.icns</string>
- <key>CFBundleInfoDictionaryVersion</key>
- <string>6.0</string>
- <key>CFBundleName</key>
- <string>leap-client</string>
- <key>CFBundlePackageType</key>
- <string>APPL</string>
- <key>CFBundleShortVersionString</key>
- <string>1</string>
- <key>LSBackgroundOnly</key>
- <false/>
-</dict>
-</plist>
diff --git a/pkg/osx/Makefile b/pkg/osx/Makefile
deleted file mode 100644
index f2520fcf..00000000
--- a/pkg/osx/Makefile
+++ /dev/null
@@ -1,46 +0,0 @@
-#WARNING: You need to run this with an activated VIRTUALENV.
-
-OSX = dist/LEAP\ Client.app/Contents/MacOS/
-GITC = `git rev-parse --short HEAD`
-DMG = "dist/leap-client-$(GITC).dmg"
-INST = "dist/LEAP Client installer.app"
-INSTR = "dist/LEAP Client installer.app/Contents/Resources"
-
-pkg : dist trim installer dmg
-
-dist :
- ~/pyinstaller/pyinstaller.py -w -s leap-client.spec
- cp -r /opt/local/Library/Frameworks/QtGui.framework/Versions/4/Resources/qt_menu.nib "dist/LEAP Client.app/Contents/Resources"
- cp Info.plist "dist/LEAP Client.app/Contents/Info.plist"
- cp ../../data/images/leap-client.icns "dist/LEAP Client.app/Contents/Resources/icon-windowed.icns"
-
-trim:
- #XXX this should go properly in pyinstaller spec excludes, but going quick'n'dirty
- rm $(OSX)QtSvg $(OSX)QtXml $(OSX)QtNetwork $(OSX)QtOpenGL $(OSX)Qt3Support $(OSX)QtSql
-
-installer:
- #XXX need to fix some paths there (binary, etc)
- platypus -P install/leap-installer.platypus -y $(INST)
- #XXX should build tuntap extensions ourselves
- mkdir $(INSTR)/StartupItems
- mkdir $(INSTR)/Extensions
- cp -r /opt/local/Library/StartupItems/tun $(INSTR)/StartupItems
- cp -r /opt/local/Library/StartupItems/tap $(INSTR)/StartupItems
- cp -r /opt/local/Library/Extensions/tun.kext $(INSTR)/Extensions
- cp -r /opt/local/Library/Extensions/tap.kext $(INSTR)/Extensions
- #copy the binary that we have previously built
- #XXX not building it yet...
- cp ../../openvpn/build/openvpn.leap $(INSTR)
- #copy startup scripts
- cp install/client.up.sh $(INSTR)
- cp install/client.down.sh $(INSTR)
- cp install/ProcessNetworkChanges.plist.template $(INSTR)
- #Finally, copy application bundle...
- cp -r "dist/LEAP Client.app" $(INSTR)
-
-dmg :
- rm -f $(DMG)
- hdiutil create -format UDBZ -srcfolder $(INST) $(DMG)
-
-clean :
- rm -rf dist/ build/
diff --git a/pkg/osx/README.rst b/pkg/osx/README.rst
deleted file mode 100644
index 48d96ffb..00000000
--- a/pkg/osx/README.rst
+++ /dev/null
@@ -1,60 +0,0 @@
-environment setup in osx
-========================
-(I rm'd my README by mistake at some point. Re-do).
-
-Basically you need this to set up your environment:
-
-# check and consolidate
-
-# install xcode and macports
-# port -v selfupdate
-# port install python26
-# port install python_select # unneeded?
-# port install py26-pyqt4
-# port install py26-twisted
-# port install py26-pip
-# port install py26-virtualenv
-# port install git-core
-# port install gnutls
-# port install platypus
-
-Requirements
-============
-pyinstaller (in ~/pyinstaller)
-platypus (tested with latest macports)
-
-... + install environment as usual,
- inside virtualenv.
-
-.. note:: there is something missing here about troubles building the gnutls extension;
-   I think I ended up symlinking the global install via macports.
-
-Pyinstaller fix for sip api
----------------------------
-We need a workaround for setting the right sip api.
-Paste this at the top of pyinstaller/support/rthooks/pyi_rth_qt4plugins.py::
-
- import sip
- sip.setapi('QString', 2)
- sip.setapi('QVariant', 2)
-
-See www.pyinstaller.org/wiki/Recipe/PyQtChangeApiVersion.
-
-Building the package
-====================
-
-Building the binary
--------------------
-We use the scripts in openvpn/build.zsh
-The packaging Makefile is expecting the final binary in the location::
-
- ../../openvpn/build/openvpn.leap
-
-Running the build
------------------
-IMPORTANT: activate the VIRTUALENV FIRST!
-(you will get an import error otherwise)
-
-For running all steps at once::
-
- make pkg
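-
-The individual stages can also be run separately (target names as defined in the Makefile above)::
-
- make dist      # run pyinstaller and copy resources into the .app bundle
- make trim      # drop unused Qt frameworks from the bundle
- make installer # build the Platypus installer and copy the helper files
- make dmg       # wrap everything into a compressed disk image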
diff --git a/pkg/osx/install/ProcessNetworkChanges.plist.template b/pkg/osx/install/ProcessNetworkChanges.plist.template
deleted file mode 100644
index faea8dee..00000000
--- a/pkg/osx/install/ProcessNetworkChanges.plist.template
+++ /dev/null
@@ -1,16 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
- <dict>
- <key>Label</key>
- <string>net.tunnelblick.openvpn.process-network-changes</string>
- <key>ProgramArguments</key>
- <array>
- <string>${DIR}/process-network-changes</string>
- </array>
- <key>WatchPaths</key>
- <array>
- <string>/Library/Preferences/SystemConfiguration</string>
- </array>
- </dict>
-</plist>
diff --git a/pkg/osx/install/client.down.sh b/pkg/osx/install/client.down.sh
deleted file mode 100755
index 47f00ed7..00000000
--- a/pkg/osx/install/client.down.sh
+++ /dev/null
@@ -1,146 +0,0 @@
-#!/bin/bash -e
-# Note: must be bash; uses bash-specific tricks
-#
-# ******************************************************************************************************************
-# This Tunnelblick script does everything! It handles TUN and TAP interfaces,
-# pushed configurations and DHCP leases. :)
-#
-# This is the "Down" version of the script, executed after the connection is
-# closed.
-#
-# Created by: Nick Williams (using original code and parts of old Tblk scripts)
-#
-# ******************************************************************************************************************
-
-trap "" TSTP
-trap "" HUP
-trap "" INT
-export PATH="/bin:/sbin:/usr/sbin:/usr/bin"
-
-readonly LOG_MESSAGE_COMMAND=$(basename "${0}")
-
-# Quick check - is the configuration there?
-if ! scutil -w State:/Network/OpenVPN &>/dev/null -t 1 ; then
- # Configuration isn't there, so we forget it
- echo "$(date '+%a %b %e %T %Y') *Tunnelblick $LOG_MESSAGE_COMMAND: WARNING: No existing OpenVPN DNS configuration found; not tearing down anything; exiting."
- exit 0
-fi
-
-# NOTE: This script does not use any arguments passed to it by OpenVPN, so it doesn't shift Tunnelblick options out of the argument list
-
-# Get info saved by the up script
-TUNNELBLICK_CONFIG="$(/usr/sbin/scutil <<-EOF
- open
- show State:/Network/OpenVPN
- quit
-EOF)"
-
-ARG_MONITOR_NETWORK_CONFIGURATION="$(echo "${TUNNELBLICK_CONFIG}" | grep -i '^[[:space:]]*MonitorNetwork :' | sed -e 's/^.*: //g')"
-LEASEWATCHER_PLIST_PATH="$(echo "${TUNNELBLICK_CONFIG}" | grep -i '^[[:space:]]*LeaseWatcherPlistPath :' | sed -e 's/^.*: //g')"
-PSID="$(echo "${TUNNELBLICK_CONFIG}" | grep -i '^[[:space:]]*Service :' | sed -e 's/^.*: //g')"
-SCRIPT_LOG_FILE="$(echo "${TUNNELBLICK_CONFIG}" | grep -i '^[[:space:]]*ScriptLogFile :' | sed -e 's/^.*: //g')"
-# Don't need: ARG_RESTORE_ON_DNS_RESET="$(echo "${TUNNELBLICK_CONFIG}" | grep -i '^[[:space:]]*RestoreOnDNSReset :' | sed -e 's/^.*: //g')"
-# Don't need: ARG_RESTORE_ON_WINS_RESET="$(echo "${TUNNELBLICK_CONFIG}" | grep -i '^[[:space:]]*RestoreOnWINSReset :' | sed -e 's/^.*: //g')"
-# Don't need: PROCESS="$(echo "${TUNNELBLICK_CONFIG}" | grep -i '^[[:space:]]*PID :' | sed -e 's/^.*: //g')"
-# Don't need: ARG_IGNORE_OPTION_FLAGS="$(echo "${TUNNELBLICK_CONFIG}" | grep -i '^[[:space:]]*IgnoreOptionFlags :' | sed -e 's/^.*: //g')"
-ARG_TAP="$(echo "${TUNNELBLICK_CONFIG}" | grep -i '^[[:space:]]*IsTapInterface :' | sed -e 's/^.*: //g')"
-bRouteGatewayIsDhcp="$(echo "${TUNNELBLICK_CONFIG}" | grep -i '^[[:space:]]*RouteGatewayIsDhcp :' | sed -e 's/^.*: //g')"
-
-# @param String message - The message to log
-logMessage()
-{
- echo "$(date '+%a %b %e %T %Y') *Tunnelblick $LOG_MESSAGE_COMMAND: "${@} >> "${SCRIPT_LOG_FILE}"
-}
-
-trim()
-{
- echo ${@}
-}
-
-if ${ARG_TAP} ; then
- if [ "$bRouteGatewayIsDhcp" == "true" ]; then
- if [ -z "$dev" ]; then
- logMessage "Cannot configure TAP interface for DHCP without \$dev being defined. Device may not have disconnected properly."
- else
- set +e
- ipconfig set "$dev" NONE 2>/dev/null
- set -e
- fi
- fi
-fi
-
-# Issue warning if the primary service ID has changed
-PSID_CURRENT="$( (scutil | grep Service | sed -e 's/.*Service : //')<<- EOF
- open
- show State:/Network/OpenVPN
- quit
-EOF)"
-if [ "${PSID}" != "${PSID_CURRENT}" ] ; then
- logMessage "Ignoring change of Network Primary Service from ${PSID} to ${PSID_CURRENT}"
-fi
-
-# Remove leasewatcher
-if ${ARG_MONITOR_NETWORK_CONFIGURATION} ; then
- launchctl unload "${LEASEWATCHER_PLIST_PATH}"
- logMessage "Cancelled monitoring of system configuration changes"
-fi
-
-# Restore configurations
-DNS_OLD="$(/usr/sbin/scutil <<-EOF
- open
- show State:/Network/OpenVPN/OldDNS
- quit
-EOF)"
-WINS_OLD="$(/usr/sbin/scutil <<-EOF
- open
- show State:/Network/OpenVPN/OldSMB
- quit
-EOF)"
-TB_NO_SUCH_KEY="<dictionary> {
- TunnelblickNoSuchKey : true
-}"
-
-if [ "${DNS_OLD}" = "${TB_NO_SUCH_KEY}" ] ; then
- scutil <<- EOF
- open
- remove State:/Network/Service/${PSID}/DNS
- quit
-EOF
-else
- scutil <<- EOF
- open
- get State:/Network/OpenVPN/OldDNS
- set State:/Network/Service/${PSID}/DNS
- quit
-EOF
-fi
-
-if [ "${WINS_OLD}" = "${TB_NO_SUCH_KEY}" ] ; then
- scutil <<- EOF
- open
- remove State:/Network/Service/${PSID}/SMB
- quit
-EOF
-else
- scutil <<- EOF
- open
- get State:/Network/OpenVPN/OldSMB
- set State:/Network/Service/${PSID}/SMB
- quit
-EOF
-fi
-
-logMessage "Restored the DNS and WINS configurations"
-
-# Remove our system configuration data
-scutil <<- EOF
- open
- remove State:/Network/OpenVPN/SMB
- remove State:/Network/OpenVPN/DNS
- remove State:/Network/OpenVPN/OldSMB
- remove State:/Network/OpenVPN/OldDNS
- remove State:/Network/OpenVPN
- quit
-EOF
-
-exit 0
diff --git a/pkg/osx/install/client.up.sh b/pkg/osx/install/client.up.sh
deleted file mode 100755
index fc7e341a..00000000
--- a/pkg/osx/install/client.up.sh
+++ /dev/null
@@ -1,596 +0,0 @@
-#!/bin/bash -e
-# Note: must be bash; uses bash-specific tricks
-#
-# ******************************************************************************************************************
-# This Tunnelblick script does everything! It handles TUN and TAP interfaces,
-# pushed configurations, DHCP with DNS and WINS, and renewed DHCP leases. :)
-#
-# This is the "Up" version of the script, executed after the interface is
-# initialized.
-#
-# Created by: Nick Williams (using original code and parts of old Tblk scripts)
-#
-# ******************************************************************************************************************
-
-trap "" TSTP
-trap "" HUP
-trap "" INT
-export PATH="/bin:/sbin:/usr/sbin:/usr/bin"
-
-# Process optional arguments (if any) for the script
-# Each one begins with a "-"
-# They come from Tunnelblick, and come first, before the OpenVPN arguments
-# So we set ARG_ script variables to their values and shift them out of the argument list
-# When we're done, only the OpenVPN arguments remain for the rest of the script to use
-ARG_MONITOR_NETWORK_CONFIGURATION="false"
-ARG_RESTORE_ON_DNS_RESET="false"
-ARG_RESTORE_ON_WINS_RESET="false"
-ARG_TAP="false"
-ARG_IGNORE_OPTION_FLAGS=""
-
-while [ {$#} ] ; do
- if [ "$1" = "-m" ] ; then # Handle the arguments we know about
- ARG_MONITOR_NETWORK_CONFIGURATION="true" # by setting ARG_ script variables to their values
- shift # Then shift them out
- elif [ "$1" = "-d" ] ; then
- ARG_RESTORE_ON_DNS_RESET="true"
- shift
- elif [ "$1" = "-w" ] ; then
- ARG_RESTORE_ON_WINS_RESET="true"
- shift
- elif [ "$1" = "-a" ] ; then
- ARG_TAP="true"
- shift
- elif [ "${1:0:2}" = "-i" ] ; then
- ARG_IGNORE_OPTION_FLAGS="${1}"
- shift
- elif [ "${1:0:2}" = "-a" ] ; then
- ARG_IGNORE_OPTION_FLAGS="${1}"
- shift
- else
- if [ "${1:0:1}" = "-" ] ; then # Shift out Tunnelblick arguments (they start with "-") that we don't understand
- shift # so the rest of the script sees only the OpenVPN arguments
- else
- break
- fi
- fi
-done
-
-readonly ARG_MONITOR_NETWORK_CONFIGURATION ARG_RESTORE_ON_DNS_RESET ARG_RESTORE_ON_WINS_RESET ARG_TAP ARG_IGNORE_OPTION_FLAGS
-
-# Note: The script log path name is constructed from the path of the regular config file, not the shadow copy
-# if the config is shadow copy, e.g. /Library/Application Support/Tunnelblick/Users/Jonathan/Folder/Subfolder/config.ovpn
-# then convert to regular config /Users/Jonathan/Library/Application Support/Tunnelblick/Configurations/Folder/Subfolder/config.ovpn
-# to get the script log path
-# Note: "/Users/..." works even if the home directory has a different path; it is used in the name of the log file, and is not used as a path to get to anything.
-readonly TBALTPREFIX="/Library/Application Support/Tunnelblick/Users/"
-readonly TBALTPREFIXLEN="${#TBALTPREFIX}"
-readonly TBCONFIGSTART="${config:0:$TBALTPREFIXLEN}"
-if [ "$TBCONFIGSTART" = "$TBALTPREFIX" ] ; then
- readonly TBBASE="${config:$TBALTPREFIXLEN}"
- readonly TBSUFFIX="${TBBASE#*/}"
- readonly TBUSERNAME="${TBBASE%%/*}"
- readonly TBCONFIG="/Users/$TBUSERNAME/Library/Application Support/Tunnelblick/Configurations/$TBSUFFIX"
-else
- readonly TBCONFIG="${config}"
-fi
-
-readonly CONFIG_PATH_DASHES_SLASHES="$(echo "${TBCONFIG}" | sed -e 's/-/--/g' | sed -e 's/\//-S/g')"
-readonly SCRIPT_LOG_FILE="/Library/Application Support/Tunnelblick/Logs/${CONFIG_PATH_DASHES_SLASHES}.script.log"
-
-readonly TB_RESOURCE_PATH=$(dirname "${0}")
-
-LEASEWATCHER_PLIST_PATH="/Library/Application Support/Tunnelblick/LeaseWatch.plist"
-
-readonly OSVER="$(sw_vers | grep 'ProductVersion:' | grep -o '10\.[0-9]*')"
-
-readonly DEFAULT_DOMAIN_NAME="openvpn"
-
-bRouteGatewayIsDhcp="false"
-
-# @param String message - The message to log
-readonly LOG_MESSAGE_COMMAND=$(basename "${0}")
-logMessage()
-{
- echo "$(date '+%a %b %e %T %Y') *Tunnelblick $LOG_MESSAGE_COMMAND: "${@} >> "${SCRIPT_LOG_FILE}"
-}
-
-# @param String string - Content to trim
-trim()
-{
- echo ${@}
-}
-
-# @param String[] dnsServers - The name servers to use
-# @param String domainName - The domain name to use
-# @param \optional String[] winsServers - The WINS servers to use
-setDnsServersAndDomainName()
-{
- declare -a vDNS=("${!1}")
- domain=$2
- declare -a vWINS=("${!3}")
-
- set +e # "grep" will return error status (1) if no matches are found, so don't fail on individual errors
-
- PSID=$( (scutil | grep PrimaryService | sed -e 's/.*PrimaryService : //')<<- EOF
- open
- show State:/Network/Global/IPv4
- quit
-EOF )
-
- STATIC_DNS_CONFIG="$( (scutil | sed -e 's/^[[:space:]]*[[:digit:]]* : //g' | tr '\n' ' ')<<- EOF
- open
- show Setup:/Network/Service/${PSID}/DNS
- quit
-EOF )"
- if echo "${STATIC_DNS_CONFIG}" | grep -q "ServerAddresses" ; then
- readonly STATIC_DNS="$(trim "$( echo "${STATIC_DNS_CONFIG}" | sed -e 's/^.*ServerAddresses[^{]*{[[:space:]]*\([^}]*\)[[:space:]]*}.*$/\1/g' )")"
- fi
- if echo "${STATIC_DNS_CONFIG}" | grep -q "SearchDomains" ; then
- readonly STATIC_SEARCH="$(trim "$( echo "${STATIC_DNS_CONFIG}" | sed -e 's/^.*SearchDomains[^{]*{[[:space:]]*\([^}]*\)[[:space:]]*}.*$/\1/g' )")"
- fi
-
- STATIC_WINS_CONFIG="$( (scutil | sed -e 's/^[[:space:]]*[[:digit:]]* : //g' | tr '\n' ' ')<<- EOF
- open
- show Setup:/Network/Service/${PSID}/SMB
- quit
-EOF )"
- STATIC_WINS_SERVERS=""
- STATIC_WORKGROUP=""
- STATIC_NETBIOSNAME=""
- if echo "${STATIC_WINS_CONFIG}" | grep -q "WINSAddresses" ; then
- STATIC_WINS_SERVERS="$(trim "$( echo "${STATIC_WINS_CONFIG}" | sed -e 's/^.*WINSAddresses[^{]*{[[:space:]]*\([^}]*\)[[:space:]]*}.*$/\1/g' )")"
- fi
- if echo "${STATIC_WINS_CONFIG}" | grep -q "Workgroup" ; then
- STATIC_WORKGROUP="$(trim "$( echo "${STATIC_WINS_CONFIG}" | sed -e 's/^.*Workgroup : \([^[:space:]]*\).*$/\1/g' )")"
- fi
- if echo "${STATIC_WINS_CONFIG}" | grep -q "NetBIOSName" ; then
- STATIC_NETBIOSNAME="$(trim "$( echo "${STATIC_WINS_CONFIG}" | sed -e 's/^.*NetBIOSName : \([^[:space:]]*\).*$/\1/g' )")"
- fi
- readonly STATIC_WINS_SERVERS STATIC_WORKGROUP STATIC_NETBIOSNAME
-
- if [ ${#vDNS[*]} -eq 0 ] ; then
- DYN_DNS="false"
- ALL_DNS="${STATIC_DNS}"
- elif [ -n "${STATIC_DNS}" ] ; then
- case "${OSVER}" in
- 10.6 | 10.7 )
- # Do nothing - in 10.6 we don't aggregate our configurations, apparently
- DYN_DNS="false"
- ALL_DNS="${STATIC_DNS}"
- ;;
- 10.4 | 10.5 )
- DYN_DNS="true"
- # We need to remove duplicate DNS entries, so that our reference list matches MacOSX's
- SDNS="$(echo "${STATIC_DNS}" | tr ' ' '\n')"
- (( i=0 ))
- for n in "${vDNS[@]}" ; do
- if echo "${SDNS}" | grep -q "${n}" ; then
- unset vDNS[${i}]
- fi
- (( i++ ))
- done
- if [ ${#vDNS[*]} -gt 0 ] ; then
- ALL_DNS="$(trim "${STATIC_DNS}" "${vDNS[*]}")"
- else
- DYN_DNS="false"
- ALL_DNS="${STATIC_DNS}"
- fi
- ;;
- esac
- else
- DYN_DNS="true"
- ALL_DNS="$(trim "${vDNS[*]}")"
- fi
- readonly DYN_DNS ALL_DNS
-
- if [ ${#vWINS[*]} -eq 0 ] ; then
- DYN_WINS="false"
- ALL_WINS_SERVERS="${STATIC_WINS_SERVERS}"
- elif [ -n "${STATIC_WINS_SERVERS}" ] ; then
- case "${OSVER}" in
- 10.6 | 10.7 )
- # Do nothing - in 10.6 we don't aggregate our configurations, apparently
- DYN_WINS="false"
- ALL_WINS_SERVERS="${STATIC_WINS_SERVERS}"
- ;;
- 10.4 | 10.5 )
- DYN_WINS="true"
- # We need to remove duplicate WINS entries, so that our reference list matches MacOSX's
- SWINS="$(echo "${STATIC_WINS_SERVERS}" | tr ' ' '\n')"
- (( i=0 ))
- for n in "${vWINS[@]}" ; do
- if echo "${SWINS}" | grep -q "${n}" ; then
- unset vWINS[${i}]
- fi
- (( i++ ))
- done
- if [ ${#vWINS[*]} -gt 0 ] ; then
- ALL_WINS_SERVERS="$(trim "${STATIC_WINS_SERVERS}" "${vWINS[*]}")"
- else
- DYN_WINS="false"
- ALL_WINS_SERVERS="${STATIC_WINS_SERVERS}"
- fi
- ;;
- esac
- else
- DYN_WINS="true"
- ALL_WINS_SERVERS="$(trim "${vWINS[*]}")"
- fi
- readonly DYN_WINS ALL_WINS_SERVERS
-
- # We double-check that our search domain isn't already on the list
- SEARCH_DOMAIN="${domain}"
- case "${OSVER}" in
- 10.6 | 10.7 )
- # Do nothing - in 10.6 we don't aggregate our configurations, apparently
- if [ -n "${STATIC_SEARCH}" ] ; then
- ALL_SEARCH="${STATIC_SEARCH}"
- SEARCH_DOMAIN=""
- else
- ALL_SEARCH="${SEARCH_DOMAIN}"
- fi
- ;;
- 10.4 | 10.5 )
- if echo "${STATIC_SEARCH}" | tr ' ' '\n' | grep -q "${SEARCH_DOMAIN}" ; then
- SEARCH_DOMAIN=""
- fi
- if [ -z "${SEARCH_DOMAIN}" ] ; then
- ALL_SEARCH="${STATIC_SEARCH}"
- else
- ALL_SEARCH="$(trim "${STATIC_SEARCH}" "${SEARCH_DOMAIN}")"
- fi
- ;;
- esac
- readonly SEARCH_DOMAIN ALL_SEARCH
-
- if ! ${DYN_DNS} ; then
- NO_DNS="#"
- fi
- if ! ${DYN_WINS} ; then
- NO_WS="#"
- fi
- if [ -z "${SEARCH_DOMAIN}" ] ; then
- NO_SEARCH="#"
- fi
- if [ -z "${STATIC_WORKGROUP}" ] ; then
- NO_WG="#"
- fi
- if [ -z "${STATIC_NETBIOSNAME}" ] ; then
- NO_NB="#"
- fi
- if [ -z "${ALL_DNS}" ] ; then
- AGG_DNS="#"
- fi
- if [ -z "${ALL_SEARCH}" ] ; then
- AGG_SEARCH="#"
- fi
- if [ -z "${ALL_WINS_SERVERS}" ] ; then
- AGG_WINS="#"
- fi
-
- # Now, do the aggregation
- # Save the openvpn process ID and the Network Primary Service ID, the leasewatcher.plist path, the logfile path, and optional arguments from Tunnelblick,
- # then save old and new DNS and WINS settings
- # PPID is a bash-script variable that contains the process ID of the parent of the process running the script (i.e., OpenVPN's process ID)
- # config is an environmental variable set to the configuration path by OpenVPN prior to running this up script
- logMessage "Up to two 'No such key' warnings are normal and may be ignored"
-
- # If DNS is manually set, it overrides the DHCP setting, which isn't reflected in 'State:/Network/Service/${PSID}/DNS'
- if echo "${STATIC_DNS_CONFIG}" | grep -q "ServerAddresses" ; then
- CORRECT_OLD_DNS_KEY="Setup:"
- else
- CORRECT_OLD_DNS_KEY="State:"
- fi
-
- # If WINS is manually set, it overrides the DHCP setting, which isn't reflected in 'State:/Network/Service/${PSID}/SMB'
- if echo "${STATIC_WINS_CONFIG}" | grep -q "WINSAddresses" ; then
- CORRECT_OLD_WINS_KEY="Setup:"
- else
- CORRECT_OLD_WINS_KEY="State:"
- fi
-
- # If we are not expecting any WINS value, add <TunnelblickNoSuchKey : true> to the expected WINS setup
- NO_NOSUCH_KEY_WINS="#"
- if [ "${NO_NB}" = "#" -a "${AGG_WINS}" = "#" -a "${NO_WG}" = "#" ] ; then
- NO_NOSUCH_KEY_WINS=""
- fi
- readonly NO_NOSUCH_KEY_WINS
-
- set -e # We instruct bash that it CAN again fail on errors
-
- scutil <<- EOF
- open
- d.init
- d.add PID # ${PPID}
- d.add Service ${PSID}
- d.add LeaseWatcherPlistPath "${LEASEWATCHER_PLIST_PATH}"
- d.add ScriptLogFile "${SCRIPT_LOG_FILE}"
- d.add MonitorNetwork "${ARG_MONITOR_NETWORK_CONFIGURATION}"
- d.add RestoreOnDNSReset "${ARG_RESTORE_ON_DNS_RESET}"
- d.add RestoreOnWINSReset "${ARG_RESTORE_ON_WINS_RESET}"
- d.add IgnoreOptionFlags "${ARG_IGNORE_OPTION_FLAGS}"
- d.add IsTapInterface "${ARG_TAP}"
- d.add RouteGatewayIsDhcp "${bRouteGatewayIsDhcp}"
- set State:/Network/OpenVPN
-
- # First, back up the device's current DNS and WINS configurations
- # Indicate 'no such key' by a dictionary with a single entry: "TunnelblickNoSuchKey : true"
- d.init
- d.add TunnelblickNoSuchKey true
- get ${CORRECT_OLD_DNS_KEY}/Network/Service/${PSID}/DNS
- set State:/Network/OpenVPN/OldDNS
-
- d.init
- d.add TunnelblickNoSuchKey true
- get ${CORRECT_OLD_WINS_KEY}/Network/Service/${PSID}/SMB
- set State:/Network/OpenVPN/OldSMB
-
- # Second, initialize the new DNS map
- d.init
- ${NO_DNS}d.add ServerAddresses * ${vDNS[*]}
- ${NO_SEARCH}d.add SearchDomains * ${SEARCH_DOMAIN}
- d.add DomainName ${domain}
- set State:/Network/Service/${PSID}/DNS
-
- # Third, initialize the WINS map
- d.init
- ${NO_NB}d.add NetBIOSName ${STATIC_NETBIOSNAME}
- ${NO_WS}d.add WINSAddresses * ${vWINS[*]}
- ${NO_WG}d.add Workgroup ${STATIC_WORKGROUP}
- set State:/Network/Service/${PSID}/SMB
-
- # Now, initialize the maps that will be compared against the system-generated map
- # which means that we will have to aggregate configurations of statically-configured
- # nameservers, and statically-configured search domains
- d.init
- ${AGG_DNS}d.add ServerAddresses * ${ALL_DNS}
- ${AGG_SEARCH}d.add SearchDomains * ${ALL_SEARCH}
- d.add DomainName ${domain}
- set State:/Network/OpenVPN/DNS
-
- d.init
- ${NO_NB}d.add NetBIOSName ${STATIC_NETBIOSNAME}
- ${AGG_WINS}d.add WINSAddresses * ${ALL_WINS_SERVERS}
- ${NO_WG}d.add Workgroup ${STATIC_WORKGROUP}
- ${NO_NOSUCH_KEY_WINS}d.add TunnelblickNoSuchKey true
- set State:/Network/OpenVPN/SMB
-
- # We are done
- quit
-EOF
-
- logMessage "Saved the DNS and WINS configurations for later use"
-
- if ${ARG_MONITOR_NETWORK_CONFIGURATION} ; then
- if [ "${ARG_IGNORE_OPTION_FLAGS:0:2}" = "-a" ] ; then
- # Generate an updated plist with the path for process-network-changes
- readonly LEASEWATCHER_TEMPLATE_PATH="$(dirname "${0}")/ProcessNetworkChanges.plist.template"
- sed -e "s|\${DIR}|$(dirname "${0}")|g" "${LEASEWATCHER_TEMPLATE_PATH}" > "${LEASEWATCHER_PLIST_PATH}"
- launchctl load "${LEASEWATCHER_PLIST_PATH}"
- logMessage "Set up to monitor system configuration with process-network-changes"
- else
- # Generate an updated plist with the path for leasewatch
- readonly LEASEWATCHER_TEMPLATE_PATH="$(dirname "${0}")/LeaseWatch.plist.template"
- sed -e "s|\${DIR}|$(dirname "${0}")|g" "${LEASEWATCHER_TEMPLATE_PATH}" > "${LEASEWATCHER_PLIST_PATH}"
- launchctl load "${LEASEWATCHER_PLIST_PATH}"
- logMessage "Set up to monitor system configuration with leasewatch"
- fi
- fi
-}
-
-configureDhcpDns()
-{
- # whilst ipconfig will have created the necessary Network Service keys, the DNS
- # settings won't actually be used by OS X unless the SupplementalMatchDomains key
- # is added
- # ref. <http://lists.apple.com/archives/Macnetworkprog/2005/Jun/msg00011.html>
- # - is there a way to extract the domains from the SC dictionary and re-insert
- # as SupplementalMatchDomains? i.e. not requiring the ipconfig domain_name call?
-
- # - wait until we get a lease before extracting the DNS domain name and merging into SC
- # - despite its name, ipconfig waitall doesn't (but maybe one day it will :-)
- ipconfig waitall
-
- unset test_domain_name
- unset test_name_server
-
- set +e # We instruct bash NOT to exit on individual command errors, because if we need to wait longer these commands will fail
-
- # usually takes at least a few seconds to get a DHCP lease
- sleep 3
- n=0
- while [ -z "$test_domain_name" -a -z "$test_name_server" -a $n -lt 5 ]
- do
- logMessage "Sleeping for $n seconds to wait for DHCP to finish setup."
- sleep $n
- n=`expr $n + 1`
-
- if [ -z "$test_domain_name" ]; then
- test_domain_name=`ipconfig getoption $dev domain_name 2>/dev/null`
- fi
-
- if [ -z "$test_name_server" ]; then
- test_name_server=`ipconfig getoption $dev domain_name_server 2>/dev/null`
- fi
- done
-
- sGetPacketOutput=`ipconfig getpacket $dev`
-
- set -e # We instruct bash that it CAN again fail on individual errors
-
- #echo "`date` test_domain_name = $test_domain_name, test_name_server = $test_name_server, sGetPacketOutput = $sGetPacketOutput"
-
- unset aNameServers
- unset aWinsServers
-
- nNameServerIndex=1
- nWinsServerIndex=1
-
- if [ "$sGetPacketOutput" ]; then
- sGetPacketOutput_FirstLine=`echo "$sGetPacketOutput"|head -n 1`
- #echo $sGetPacketOutput_FirstLine
-
- if [ "$sGetPacketOutput_FirstLine" == "op = BOOTREPLY" ]; then
- set +e # "grep" will return error status (1) if no matches are found, so don't fail on individual errors
-
- for tNameServer in `echo "$sGetPacketOutput"|grep "domain_name_server"|grep -Eo "\{([0-9\.]+)(, [0-9\.]+)*\}"|grep -Eo "([0-9\.]+)"`; do
- aNameServers[nNameServerIndex-1]="$(trim "$tNameServer")"
- let nNameServerIndex++
- done
-
- for tWINSServer in `echo "$sGetPacketOutput"|grep "nb_over_tcpip_name_server"|grep -Eo "\{([0-9\.]+)(, [0-9\.]+)*\}"|grep -Eo "([0-9\.]+)"`; do
- aWinsServers[nWinsServerIndex-1]="$(trim "$tWINSServer")"
- let nWinsServerIndex++
- done
-
- sDomainName=`echo "$sGetPacketOutput"|grep "domain_name "|grep -Eo ": [-A-Za-z0-9\-\.]+"|grep -Eo "[-A-Za-z0-9\-\.]+"`
- sDomainName="$(trim "$sDomainName")"
-
- if [ ${#aNameServers[*]} -gt 0 -a "$sDomainName" ]; then
- logMessage "Retrieved name server(s) [ ${aNameServers[@]} ], domain name [ $sDomainName ], and WINS server(s) [ ${aWinsServers[@]} ]"
- setDnsServersAndDomainName aNameServers[@] "$sDomainName" aWinsServers[@]
- return 0
- elif [ ${#aNameServers[*]} -gt 0 ]; then
- logMessage "Retrieved name server(s) [ ${aNameServers[@]} ] and WINS server(s) [ ${aWinsServers[@]} ] and using default domain name [ $DEFAULT_DOMAIN_NAME ]"
- setDnsServersAndDomainName aNameServers[@] "$DEFAULT_DOMAIN_NAME" aWinsServers[@]
- return 0
- else
- # Should we return 1 here and indicate an error, or attempt the old method?
- logMessage "No useful information extracted from DHCP/BOOTP packet. Attempting legacy configuration."
- fi
-
- set -e # We instruct bash that it CAN again fail on errors
- else
- # Should we return 1 here and indicate an error, or attempt the old method?
- logMessage "No DHCP/BOOTP packet found on interface. Attempting legacy configuration."
- fi
- fi
-
- unset sDomainName
- unset sNameServer
- unset aNameServers
-
- sDomainName=`ipconfig getoption $dev domain_name 2>/dev/null`
- sNameServer=`ipconfig getoption $dev domain_name_server 2>/dev/null`
-
- sDomainName="$(trim "$sDomainName")"
- sNameServer="$(trim "$sNameServer")"
-
- declare -a aWinsServers=( ) # Declare empty WINS array to avoid any useless error messages
-
- if [ "$sDomainName" -a "$sNameServer" ]; then
- aNameServers[0]=$sNameServer
- logMessage "Retrieved name server [ $sNameServer ], domain name [ $sDomainName ], and no WINS servers"
- setDnsServersAndDomainName aNameServers[@] "$sDomainName" aWinsServers[@]
- elif [ "$sNameServer" ]; then
- aNameServers[0]=$sNameServer
- logMessage "Retrieved name server [ $sNameServer ] and no WINS servers, and using default domain name [ $DEFAULT_DOMAIN_NAME ]"
- setDnsServersAndDomainName aNameServers[@] "$DEFAULT_DOMAIN_NAME" aWinsServers[@]
- elif [ "$sDomainName" ]; then
- logMessage "WARNING: Retrieved domain name [ $sDomainName ] but no name servers from OpenVPN (DHCP), which is not sufficient to make network/DNS configuration changes."
- if ${ARG_MONITOR_NETWORK_CONFIGURATION} ; then
- logMessage "Will NOT monitor for other network configuration changes."
- fi
- else
- logMessage "WARNING: No DNS information received from OpenVPN (DHCP), so no network/DNS configuration changes need to be made."
- if ${ARG_MONITOR_NETWORK_CONFIGURATION} ; then
- logMessage "Will NOT monitor for other network configuration changes."
- fi
- fi
-
- return 0
-}
-
-configureOpenVpnDns()
-{
- unset vForOptions
- unset vOptions
- unset aNameServers
- unset aWinsServers
-
- nOptionIndex=1
- nNameServerIndex=1
- nWinsServerIndex=1
-
- while vForOptions=foreign_option_$nOptionIndex; [ -n "${!vForOptions}" ]; do
- vOptions[nOptionIndex-1]=${!vForOptions}
- case ${vOptions[nOptionIndex-1]} in
- *DOMAIN* )
- sDomainName="$(trim "${vOptions[nOptionIndex-1]//dhcp-option DOMAIN /}")"
- ;;
- *DNS* )
- aNameServers[nNameServerIndex-1]="$(trim "${vOptions[nOptionIndex-1]//dhcp-option DNS /}")"
- let nNameServerIndex++
- ;;
- *WINS* )
- aWinsServers[nWinsServerIndex-1]="$(trim "${vOptions[nOptionIndex-1]//dhcp-option WINS /}")"
- let nWinsServerIndex++
- ;;
- * )
- logMessage "Unknown: 'foreign_option_${nOptionIndex}' = '${vOptions[nOptionIndex-1]}'"
- ;;
- esac
- let nOptionIndex++
- done
-
- if [ ${#aNameServers[*]} -gt 0 -a "$sDomainName" ]; then
- logMessage "Retrieved name server(s) [ ${aNameServers[@]} ], domain name [ $sDomainName ], and WINS server(s) [ ${aWinsServers[@]} ]"
- setDnsServersAndDomainName aNameServers[@] "$sDomainName" aWinsServers[@]
- elif [ ${#aNameServers[*]} -gt 0 ]; then
- logMessage "Retrieved name server(s) [ ${aNameServers[@]} ] and WINS server(s) [ ${aWinsServers[@]} ] and using default domain name [ $DEFAULT_DOMAIN_NAME ]"
- setDnsServersAndDomainName aNameServers[@] "$DEFAULT_DOMAIN_NAME" aWinsServers[@]
- else
- # Should we maybe just return 1 here to indicate an error? Does this mean that something bad has happened?
- logMessage "No DNS information received from OpenVPN, so no network configuration changes need to be made."
- if ${ARG_MONITOR_NETWORK_CONFIGURATION} ; then
- logMessage "Will NOT monitor for other network configuration changes."
- fi
- fi
-
- return 0
-}
-
-# We sleep here to allow time for OS X to process network settings
-sleep 2
-
-EXIT_CODE=0
-
-if ${ARG_TAP} ; then
- # Still need to do: Look for route-gateway dhcp (TAP isn't always DHCP)
- bRouteGatewayIsDhcp="false"
- if [ -z "${route_vpn_gateway}" -o "$route_vpn_gateway" == "dhcp" -o "$route_vpn_gateway" == "DHCP" ]; then
- bRouteGatewayIsDhcp="true"
- fi
-
- if [ "$bRouteGatewayIsDhcp" == "true" ]; then
- if [ -z "$dev" ]; then
- logMessage "Cannot configure TAP interface for DHCP without \$dev being defined. Exiting."
- exit 1
- fi
-
- ipconfig set "$dev" DHCP
-
- configureDhcpDns &
- elif [ "$foreign_option_1" == "" ]; then
- logMessage "No network configuration changes need to be made."
- if ${ARG_MONITOR_NETWORK_CONFIGURATION} ; then
- logMessage "Will NOT monitor for other network configuration changes."
- fi
- else
- configureOpenVpnDns
- EXIT_CODE=$?
- fi
-else
- if [ "$foreign_option_1" == "" ]; then
- logMessage "No network configuration changes need to be made."
- if ${ARG_MONITOR_NETWORK_CONFIGURATION} ; then
- logMessage "Will NOT monitor for other network configuration changes."
- fi
- else
- configureOpenVpnDns
- EXIT_CODE=$?
- fi
-fi
-
-exit $EXIT_CODE
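
For reference, the core of configureOpenVpnDns() above — walking OpenVPN's foreign_option_N environment variables and collecting the pushed "dhcp-option DOMAIN/DNS/WINS" values — can be sketched in Python roughly as follows. This is an illustrative sketch, not code from this tree; the helper name is invented.

    import os

    def parse_foreign_options(environ=os.environ):
        """Collect pushed DNS servers, WINS servers and the search domain
        from OpenVPN's foreign_option_N variables ("dhcp-option DNS x.x.x.x")."""
        name_servers, wins_servers, domain = [], [], None
        index = 1
        while True:
            value = environ.get("foreign_option_%d" % index)
            if value is None:
                break
            parts = value.split()
            # expected form: "dhcp-option DNS 10.0.0.1"
            if len(parts) == 3 and parts[0] == "dhcp-option":
                if parts[1] == "DOMAIN":
                    domain = parts[2]
                elif parts[1] == "DNS":
                    name_servers.append(parts[2])
                elif parts[1] == "WINS":
                    wins_servers.append(parts[2])
            index += 1
        return name_servers, domain, wins_servers
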
diff --git a/pkg/osx/install/install-leapc.sh b/pkg/osx/install/install-leapc.sh
deleted file mode 100755
index 2ecfc08e..00000000
--- a/pkg/osx/install/install-leapc.sh
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/bin/sh
-echo "Installing LEAP Client in /Applications"
-cp -r "LEAP Client.app" "/Applications"
-
-echo "Copying openvpn binary"
-cp -r openvpn.leap /usr/bin
-
-echo "Installing tun/tap drivers"
-cp -r Extensions/* /Library/Extensions
-cp -r StartupItems/* /Library/StartupItems
-
-echo "Loading tun/tap kernel extension"
-/Library/StartupItems/tun/tun start
-
-echo "Installation Finished!"
-
-ln -s /Applications/LEAP\ Client.app/ /Volumes/LEAP\ Client\ installer/
diff --git a/pkg/osx/install/leap-installer.platypus b/pkg/osx/install/leap-installer.platypus
deleted file mode 100644
index 9150961e..00000000
--- a/pkg/osx/install/leap-installer.platypus
+++ /dev/null
@@ -1,90 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
-<plist version="1.0">
-<dict>
- <key>AcceptsFiles</key>
- <true/>
- <key>AcceptsText</key>
- <false/>
- <key>Authentication</key>
- <true/>
- <key>Author</key>
- <string>Kali Yuga</string>
- <key>BundledFiles</key>
- <array/>
- <key>Creator</key>
- <string>Platypus-4.7</string>
- <key>DeclareService</key>
- <false/>
- <key>Destination</key>
- <string>MyPlatypusApp.app</string>
- <key>DestinationOverride</key>
- <false/>
- <key>DevelopmentVersion</key>
- <false/>
- <key>DocIcon</key>
- <string></string>
- <key>Droppable</key>
- <false/>
- <key>ExecutablePath</key>
- <string>/opt/local/share/platypus/ScriptExec</string>
- <key>FileTypes</key>
- <array>
- <string>****</string>
- <string>fold</string>
- </array>
- <key>IconPath</key>
- <string></string>
- <key>Identifier</key>
- <string>se.leap.LEAPClientInstaller</string>
- <key>Interpreter</key>
- <string>/bin/sh</string>
- <key>InterpreterArgs</key>
- <array/>
- <key>Name</key>
- <string>LEAPClient Installer</string>
- <key>NibPath</key>
- <string>/opt/local/share/platypus/MainMenu.nib</string>
- <key>OptimizeApplication</key>
- <true/>
- <key>Output</key>
- <string>Progress Bar</string>
- <key>RemainRunning</key>
- <true/>
- <key>Role</key>
- <string>Viewer</string>
- <key>ScriptArgs</key>
- <array/>
- <key>ScriptPath</key>
- <string>./install/install-leapc.sh</string>
- <key>Secure</key>
- <false/>
- <key>ShowInDock</key>
- <false/>
- <key>StatusItemDisplayType</key>
- <string>Text</string>
- <key>StatusItemIcon</key>
- <data>
- </data>
- <key>StatusItemTitle</key>
- <string>MyPlatypusApp</string>
- <key>Suffixes</key>
- <array>
- <string>*</string>
- </array>
- <key>TextBackground</key>
- <string>#ffffff</string>
- <key>TextEncoding</key>
- <integer>4</integer>
- <key>TextFont</key>
- <string>Monaco</string>
- <key>TextForeground</key>
- <string>#000000</string>
- <key>TextSize</key>
- <real>10</real>
- <key>UseXMLPlistFormat</key>
- <true/>
- <key>Version</key>
- <string>1.0</string>
-</dict>
-</plist>
diff --git a/pkg/osx/leap-client.spec b/pkg/osx/leap-client.spec
deleted file mode 100644
index 75bf991b..00000000
--- a/pkg/osx/leap-client.spec
+++ /dev/null
@@ -1,36 +0,0 @@
-# -*- mode: python -*-
-a = Analysis(['../../src/leap/app.py'],
- pathex=[
- '../../src/leap',
- '/Users/kaliy/leap/leap-client-testbuild/src/leap-client/pkg/osx'],
- hiddenimports=['atexit'],
- hookspath=None)
-pyz = PYZ(a.pure)
-exe = EXE(pyz,
- a.scripts,
- exclude_binaries=1,
- name=os.path.join('build/pyi.darwin/leap-client', 'app'),
- debug=False,
- strip=True,
- upx=True,
- console=False)
-coll = COLLECT(exe,
- a.binaries +
- # this will easily break if we set up the venv
- # somewhere else. FIXME
- [('cacert.pem', '../../../../lib/python2.6/site-packages/requests/cacert.pem', 'DATA'),
- ],
- a.zipfiles,
- a.datas,
- strip=True,
- upx=True,
- name=os.path.join('dist', 'app'))
-app = BUNDLE(coll,
- name=os.path.join('dist', 'leap-client.app'))
-
-import sys
-if sys.platform.startswith("darwin"):
- app = BUNDLE(coll,
- name=os.path.join('dist', 'LEAP Client.app'),
- appname='LEAP Client',
- version=1)
diff --git a/pkg/postmkvenv.sh b/pkg/postmkvenv.sh
deleted file mode 100755
index a503052a..00000000
--- a/pkg/postmkvenv.sh
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/bin/bash
-# This hook is run after a new virtualenv is activated.
-# ~/.virtualenvs/postmkvirtualenv
-# tested and working in debian
-
-# Symlinks PyQt4 from global installation into virtualenv site-packages
-# XXX TODO:
-# script fails in ubuntu, with path: /usr/lib/pymodules/python2.7/PyQt4
-# use import PyQt4; PyQt4.__path__ instead
-
-platform='unknown'
-unamestr=`uname`
-if [[ "$unamestr" == 'Linux' ]]; then
- platform='linux'
-elif [[ "$unamestr" == 'Darwin' ]]; then
- platform='darwin'
-fi
-
-LIBS=( PyQt4 sip.so )
-
-PYTHON_VERSION=python$(python -c "import sys; print (str(sys.version_info[0])+'.'+str(sys.version_info[1]))")
-VAR=( $(which -a $PYTHON_VERSION) )
-
-GET_PYTHON_LIB_CMD="from distutils.sysconfig import get_python_lib; print (get_python_lib())"
-LIB_VIRTUALENV_PATH=$(python -c "$GET_PYTHON_LIB_CMD")
-
-if [[ $platform == 'linux' ]]; then
- LIB_SYSTEM_PATH=$(${VAR[-1]} -c "$GET_PYTHON_LIB_CMD")
-elif [[ $platform == 'darwin' ]]; then
- LIB_SYSTEM_PATH=$(/opt/local/bin/python2.6 -c "$GET_PYTHON_LIB_CMD")
-else
- echo "unsupported platform; not doing symlinks"
-fi
-
-for LIB in ${LIBS[@]}
-do
- if [[ ! -e $LIB_VIRTUALENV_PATH/$LIB ]]; then
- ln -s $LIB_SYSTEM_PATH/$LIB $LIB_VIRTUALENV_PATH/$LIB
- fi
-done
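
The TODO at the top of this hook suggests resolving the PyQt4 location with `import PyQt4; PyQt4.__path__` instead of hard-coding per-distro paths. A rough Python sketch of that approach follows; it is an illustration only, and the `python2.7` interpreter name is a placeholder, not something the script defines.

    import os
    import subprocess
    from distutils.sysconfig import get_python_lib

    # Ask the *system* interpreter where PyQt4 lives, as the TODO suggests.
    cmd = "import PyQt4, os; print(os.path.dirname(PyQt4.__path__[0]))"
    system_site = subprocess.check_output(["python2.7", "-c", cmd]).decode().strip()

    venv_site = get_python_lib()  # site-packages of the active virtualenv

    # Mirror the LIBS list used by the shell hook above.
    for lib in ("PyQt4", "sip.so"):
        src = os.path.join(system_site, lib)
        dst = os.path.join(venv_site, lib)
        if os.path.exists(src) and not os.path.exists(dst):
            os.symlink(src, dst)
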
diff --git a/pkg/requirements.pip b/pkg/requirements.pip
deleted file mode 100644
index 13c79b19..00000000
--- a/pkg/requirements.pip
+++ /dev/null
@@ -1,33 +0,0 @@
-# in order of addition to the project.
-# do not change the ordering.
-
-argparse # only for python 2.6
-requests<1.0.0
-psutil
-netifaces
-pyopenssl
-jsonschema
-srp>=1.0.2
-pycrypto
-keyring
-python-dateutil
-sh
-pyxdg
-
-pygeoip # optional
-
-# email
-
-zope.interface
-twisted>=12.3.0
-
-# soledad deps -- will move to its own repo soon
-python-gnupg
-u1db
-oauth
-couchdb
-hmac
-
-# temporary pysqlcipher package
-# XXX does not install cleanly; it needs: python src/pysqlcypher/setup.py install
--e git://rhizolab.org/pysqlcipher.git#egg=pysqlcipher
diff --git a/pkg/scripts/leap_client_bootstrap.sh b/pkg/scripts/leap_client_bootstrap.sh
deleted file mode 100644
index 6c302d3f..00000000
--- a/pkg/scripts/leap_client_bootstrap.sh
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/bin/bash
-
-# Installs requirements, and
-# clones the latest leap-client
-
-# depends on:
-# openvpn git-core libgnutls-dev python-dev python-qt4 python-setuptools python-virtualenv
-
-# Escape code
-esc=`echo -en "\033"`
-
-# Set colors
-cc_green="${esc}[0;32m"
-cc_yellow="${esc}[0;33m"
-cc_blue="${esc}[0;34m"
-cc_red="${esc}[0;31m"
-cc_normal=`echo -en "${esc}[m\017"`
-
-echo "${cc_yellow}"
-echo "~~~~~~~~~~~~~~~~~~~~~~"
-echo "LEAP "
-echo "client bootstrapping "
-echo "~~~~~~~~~~~~~~~~~~~~~~"
-echo ""
-echo "${cc_green}Creating virtualenv...${cc_normal}"
-
-mkdir leap-client-testbuild
-virtualenv leap-client-testbuild
-source leap-client-testbuild/bin/activate
-
-echo "${cc_green}Installing leap client...${cc_normal}"
-
-# Clone latest git (develop branch)
-# change "develop" for any other branch you want.
-
-
-pip install -e 'git://leap.se/leap_client@develop#egg=leap-client'
-
-cd leap-client-testbuild
-
-# symlink the pyqt libraries to the system libs
-./src/leap-client/pkg/postmkvenv.sh
-
-echo "${cc_green}leap-client installed! =)"
-echo "${cc_yellow}"
-echo "Launch it with: "
-echo "~~~~~~~~~~~~~~~~~~~~~~"
-echo "bin/leap-client"
-echo "~~~~~~~~~~~~~~~~~~~~~~"
-echo "${cc_normal}"
diff --git a/pkg/test-requirements.pip b/pkg/test-requirements.pip
deleted file mode 100644
index d60439ea..00000000
--- a/pkg/test-requirements.pip
+++ /dev/null
@@ -1,14 +0,0 @@
-six>=1.1,<1.2 # soledad req (nose2)
-unittest2 # TODO we should include this dep only for python2.6
-coverage
-mock
-nose
-pep8==1.1
-sphinx>=1.1.2
-nose-exclude
-tox
-
-# for soledad * to be split *
-nose2
-testscenarios
-testtools
diff --git a/pkg/tools/with_venv.sh b/pkg/tools/with_venv.sh
deleted file mode 100755
index 0e58f1ab..00000000
--- a/pkg/tools/with_venv.sh
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/bash
-TOOLS=`dirname $0`
-VENV=$TOOLS/../../.venv
-source $VENV/bin/activate && $@
diff --git a/pkg/utils.py b/pkg/utils.py
deleted file mode 100644
index 52680ae5..00000000
--- a/pkg/utils.py
+++ /dev/null
@@ -1,42 +0,0 @@
-"""
-utils to help in the setup process
-"""
-import os
-import re
-import sys
-
-
-# gets reqs from the first matching file
-def get_reqs_from_files(reqfiles):
- for reqfile in reqfiles:
- if os.path.isfile(reqfile):
- return open(reqfile, 'r').read().split('\n')
-
-
-def parse_requirements(reqfiles=['requirements.txt',
- 'requirements.pip',
- 'pkg/requirements.pip']):
- requirements = []
- for line in get_reqs_from_files(reqfiles):
- # -e git://foo.bar/baz/master#egg=foobar
- if re.match(r'\s*-e\s+', line):
- requirements.append(re.sub(r'\s*-e\s+.*#egg=(.*)$', r'\1',
- line))
- # http://foo.bar/baz/foobar/zipball/master#egg=foobar
- elif re.match(r'\s*https?:', line):
- requirements.append(re.sub(r'\s*https?:.*#egg=(.*)$', r'\1',
- line))
- # -f lines are for index locations, and don't get used here
- elif re.match(r'\s*-f\s+', line):
- pass
-
- # argparse is part of the standard library starting with 2.7
- # adding it to the requirements list screws distro installs
- elif line == 'argparse' and sys.version_info >= (2, 7):
- pass
- else:
- if line != '':
- requirements.append(line)
-
- #print 'REQUIREMENTS', requirements
- return requirements
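
A short usage sketch for parse_requirements() above (illustrative; the file contents shown in the comment are made up): an editable VCS line is reduced to its egg name so that setuptools' install_requires accepts it, while plain version-pinned lines pass through untouched.

    # Suppose requirements.pip contains:
    #   requests<1.0.0
    #   -e git://example.org/pysqlcipher.git#egg=pysqlcipher
    from pkg import utils

    reqs = utils.parse_requirements(reqfiles=['requirements.pip'])
    print(reqs)  # -> ['requests<1.0.0', 'pysqlcipher']
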
diff --git a/run_tests.sh b/run_tests.sh
deleted file mode 100755
index 7cbed018..00000000
--- a/run_tests.sh
+++ /dev/null
@@ -1,154 +0,0 @@
-#!/bin/bash
-
-set -eu
-
-function usage {
- echo "Usage: $0 [OPTION]..."
- echo "Run leap-client test suite"
- echo ""
- echo " -V, --virtual-env Always use virtualenv. Install automatically if not present"
- echo " -N, --no-virtual-env Don't use virtualenv. Run tests in local environment"
- echo " -s, --no-site-packages Isolate the virtualenv from the global Python environment"
- echo " -x, --stop Stop running tests after the first error or failure."
- echo " -f, --force Force a clean re-build of the virtual environment. Useful when dependencies have been added."
- echo " -p, --pep8 Just run pep8"
- echo " -P, --no-pep8 Don't run pep8"
- echo " -c, --coverage Generate coverage report"
- echo " -h, --help Print this usage message"
- echo " -A, --all Run all tests, without excluding any"
- echo " --hide-elapsed Don't print the elapsed time for each test along with slow test list"
- echo ""
- echo "Note: with no options specified, the script will try to run the tests in a virtual environment."
- echo " If no virtualenv is found, the script will ask if you would like to create one. If you "
- echo " prefer to run tests NOT in a virtual environment, simply pass the -N option."
- exit
-}
-
-function process_option {
- case "$1" in
- -h|--help) usage;;
- -V|--virtual-env) always_venv=1; never_venv=0;;
- -N|--no-virtual-env) always_venv=0; never_venv=1;;
- -s|--no-site-packages) no_site_packages=1;;
- -f|--force) force=1;;
- -p|--pep8) just_pep8=1;;
- -P|--no-pep8) no_pep8=1;;
- -c|--coverage) coverage=1;;
- -A|--all) alltests=1;;
- -*) noseopts="$noseopts $1";;
- *) noseargs="$noseargs $1"
- esac
-}
-
-venv=.venv
-with_venv=pkg/tools/with_venv.sh
-always_venv=0
-never_venv=0
-force=0
-no_site_packages=0
-installvenvopts=
-noseargs=
-noseopts=
-wrapper=""
-just_pep8=0
-no_pep8=0
-coverage=0
-alltests=0
-
-for arg in "$@"; do
- process_option $arg
-done
-
-# If enabled, tell nose to collect coverage data
-if [ $coverage -eq 1 ]; then
- noseopts="$noseopts --with-coverage --cover-package=leap-client"
-fi
-
-if [ $no_site_packages -eq 1 ]; then
- installvenvopts="--no-site-packages"
-fi
-
-# If alltests flag is not set, let's exclude some dirs that are troublesome.
-if [ $alltests -eq 0 ]; then
- noseopts="$noseopts --exclude-dir=src/leap/soledad"
-fi
-
-function run_tests {
- # Just run the test suites in current environment
- ${wrapper} $NOSETESTS
- # If we get some short import error right away, print the error log directly
- RESULT=$?
- return $RESULT
-}
-
-function run_pep8 {
- echo "Running pep8 ..."
- srcfiles="src/leap tests"
- # Just run PEP8 in current environment
- pep8_opts="--ignore=E202,W602 --exclude=*_rc.py,_version.py --repeat"
- ${wrapper} pep8 ${pep8_opts} ${srcfiles}
-}
-
-# XXX we cannot run tests that need X server
-# in the current debhelper build process,
-# so I exclude the topmost tests
-
-NOSETESTS="nosetests leap $noseopts $noseargs"
-
-if [ $never_venv -eq 0 ]
-then
- # Remove the virtual environment if --force used
- if [ $force -eq 1 ]; then
- echo "Cleaning virtualenv..."
- rm -rf ${venv}
- fi
- if [ -e ${venv} ]; then
- wrapper="${with_venv}"
- else
- if [ $always_venv -eq 1 ]; then
- # Automatically install the virtualenv
- python pkg/install_venv.py $installvenvopts
- wrapper="${with_venv}"
- else
- echo -e "No virtual environment found...create one? (Y/n) \c"
- read use_ve
- if [ "x$use_ve" = "xY" -o "x$use_ve" = "x" -o "x$use_ve" = "xy" ]; then
- # Install the virtualenv and run the test suite in it
- python pkg/install_venv.py $installvenvopts
- wrapper=${with_venv}
- fi
- fi
- fi
-fi
-
-# Delete old coverage data from previous runs
-if [ $coverage -eq 1 ]; then
- ${wrapper} coverage erase
-fi
-
-if [ $just_pep8 -eq 1 ]; then
- run_pep8
- exit
-fi
-
-run_tests
-
-if [ -z "$noseargs" ]; then
- if [ $no_pep8 -eq 0 ]; then
- run_pep8
- fi
-fi
-
-function run_coverage {
- cov_opts="--omit=`pwd`/src/leap/base/tests/*,`pwd`/src/leap/eip/tests/*,`pwd`/src/leap/gui/tests/*"
- cov_opts="$cov_opts,`pwd`/src/leap/util/tests/* "
- cov_opts="$cov_opts --include=`pwd`/src/leap/*" #,`pwd`/src/leap/eip/*"
- ${wrapper} coverage html -d docs/covhtml -i $cov_opts
- echo "now point your browser at docs/covhtml/index.html"
-}
-
-if [ $coverage -eq 1 ]; then
- echo "Generating coverage report in docs/covhtml/"
- run_coverage
- exit
-fi
diff --git a/setup.cfg b/setup.cfg
deleted file mode 100644
index 4b049f97..00000000
--- a/setup.cfg
+++ /dev/null
@@ -1,2 +0,0 @@
-[egg_info]
-#tag_build = dev
diff --git a/setup.py b/setup.py
deleted file mode 100755
index 64c2a4f5..00000000
--- a/setup.py
+++ /dev/null
@@ -1,222 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-from __future__ import print_function
-import sys
-
-try:
- from setuptools import setup, find_packages
-except ImportError:
- from pkg import distribute_setup
- distribute_setup.use_setuptools()
- from setuptools import setup, find_packages
-import os
-
-from pkg import utils
-from pkg import branding
-import versioneer
-versioneer.versionfile_source = 'src/leap/_version.py'
-versioneer.versionfile_build = 'leap/_version.py'
-versioneer.tag_prefix = '' # tags are like 1.2.0
-#versioneer.parentdir_prefix = 'leap_client-'
-versioneer.parentdir_prefix = branding.APP_PREFIX
-
-branding.brandingfile = 'src/leap/_branding.py'
-branding.brandingfile_build = 'leap/_branding.py'
-branding.cert_path = 'src/leap/certs'
-
-setup_root = os.path.dirname(__file__)
-sys.path.insert(0, os.path.join(setup_root, "src"))
-
-trove_classifiers = [
- "Development Status :: 3 - Alpha",
- "Environment :: X11 Applications :: Qt",
- "Intended Audience :: End Users/Desktop",
- ("License :: OSI Approved :: GNU General "
- "Public License v3 or later (GPLv3+)"),
- "Operating System :: OS Independent",
- "Programming Language :: Python",
- "Programming Language :: Python :: 2.6",
- "Programming Language :: Python :: 2.7",
- "Topic :: Communications",
- "Topic :: Security",
- "Topic :: System :: Networking",
- "Topic :: Utilities"
-]
-
-BRANDING_OPTS = """
-# Do NOT manually edit this file!
-# This file has been written from pkg/branding/config.py data by leap setup.py
-# script.
-
-BRANDING = {
- 'short_name': "%(short_name)s",
- 'provider_domain': "%(provider_domain)s",
- 'provider_ca_file': "%(provider_ca_file)s"}
-"""
-
-
-def write_to_branding_file(filename, branding_dict):
- f = open(filename, "w")
- f.write(BRANDING_OPTS % branding_dict)
- f.close()
-
-
-def copy_pemfile_to_certdir(frompath, topath):
- with open(frompath, "r") as cert_f:
- cert_s = cert_f.read()
- with open(topath, "w") as f:
- f.write(cert_s)
-
-
-def do_branding(targetfile=branding.brandingfile):
- if branding.BRANDED_BUILD:
- opts = branding.BRANDED_OPTS
- print("DOING BRANDING FOR LEAP")
- certpath = opts['provider_ca_path']
- shortname = opts['short_name']
- tocertfile = shortname + '-cacert.pem'
- topath = os.path.join(
- branding.cert_path,
- tocertfile)
- copy_pemfile_to_certdir(
- certpath,
- topath)
- opts['provider_ca_file'] = tocertfile
- write_to_branding_file(
- targetfile,
- opts)
- else:
- print('not running branding because BRANDED_BUILD set to False')
-
-
-from setuptools import Command
-
-
-class DoBranding(Command):
-    description = "copy the branding info to the top level package"
- user_options = []
-
- def initialize_options(self):
- pass
-
- def finalize_options(self):
- pass
-
- def run(self):
- do_branding()
-
-from distutils.command.build import build as _build
-from distutils.command.sdist import sdist as _sdist
-
-
-class cmd_build(_build):
- def run(self):
- #versioneer.cmd_build(self)
- _build.run(self)
-
- # versioneer
- versions = versioneer.get_versions(verbose=True)
- # now locate _version.py in the new build/ directory and replace it
- # with an updated value
- target_versionfile = os.path.join(
- self.build_lib,
- versioneer.versionfile_build)
- print("UPDATING %s" % target_versionfile)
- os.unlink(target_versionfile)
- f = open(target_versionfile, "w")
- f.write(versioneer.SHORT_VERSION_PY % versions)
- f.close()
-
- # branding
- target_brandingfile = os.path.join(
- self.build_lib,
- branding.brandingfile_build)
- do_branding(targetfile=target_brandingfile)
-
-
-class cmd_sdist(_sdist):
- def run(self):
- # versioneer:
- versions = versioneer.get_versions(verbose=True)
- self._versioneer_generated_versions = versions
- # unless we update this, the command will keep using the old version
- self.distribution.metadata.version = versions["version"]
-
- # branding:
- do_branding()
- return _sdist.run(self)
-
- def make_release_tree(self, base_dir, files):
- _sdist.make_release_tree(self, base_dir, files)
- # now locate _version.py in the new base_dir directory (remembering
- # that it may be a hardlink) and replace it with an updated value
- target_versionfile = os.path.join(
- base_dir, versioneer.versionfile_source)
- print("UPDATING %s" % target_versionfile)
- os.unlink(target_versionfile)
- f = open(target_versionfile, "w")
- f.write(
- versioneer.SHORT_VERSION_PY % self._versioneer_generated_versions)
- f.close()
-
-cmdclass = versioneer.get_cmdclass()
-cmdclass["branding"] = DoBranding
-
-# Uncomment this to have the branding command run automatically
-# on the build and sdist commands.
-#cmdclass["build"] = cmd_build
-#cmdclass["sdist"] = cmd_sdist
-
-launcher_name = branding.get_shortname()
-if launcher_name:
- leap_launcher = 'leap-%s-client=leap.app:main' % launcher_name
-else:
- leap_launcher = 'leap-client=leap.app:main'
-
-setup(
- name=branding.get_name(),
- package_dir={"": "src"},
- version=versioneer.get_version(),
- cmdclass=cmdclass,
- description="the internet encryption toolkit",
- long_description=(
- "Desktop Client for the LEAP Platform."
- "\n"
- "LEAP (LEAP Encryption Access Project) develops "
-        "a multi-year plan to secure everyday communication, breaking it down "
-        "into discrete services, to be rolled out one at a time.\n"
-        "The client for the current phase gives support to the EIP Service. "
- "EIP (the Encrypted Internet Proxy) provides circumvention, location "
- "anonymization, and traffic "
- "encryption in a hassle-free, automatically self-configuring fashion, "
- "and has an enhanced level of security."
- ),
- classifiers=trove_classifiers,
- install_requires=utils.parse_requirements(),
- test_suite='nose.collector',
- test_requires=utils.parse_requirements(
- reqfiles=['pkg/test-requirements.pip']),
- keywords='LEAP, client, qt, encryption, proxy, openvpn',
- author='The LEAP Encryption Access Project',
- author_email='info@leap.se',
- url='https://leap.se',
- license='GPLv3+',
- packages=find_packages(
- 'src',
- exclude=['ez_setup', 'setup', 'examples', 'tests']),
- include_package_data=True,
- zip_safe=False,
-
- # not being used since setuptools does not like it.
- # XXX it should be only for linux!
- data_files=[
- ("share/man/man1",
- ["docs/man/leap.1"]),
- ("share/polkit-1/actions",
- ["pkg/linux/polkit/net.openvpn.gui.leap.policy"])
- ],
- platforms="all",
- entry_points={
- 'console_scripts': [leap_launcher]
- },
-)
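
As the commented-out cmdclass lines above indicate, the custom build/sdist commands were left opt-in; enabling them is only a matter of registering them. Shown here purely as an illustration of what uncommenting would do:

    # Run branding and version rewriting automatically on build/sdist.
    cmdclass["build"] = cmd_build
    cmdclass["sdist"] = cmd_sdist
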
diff --git a/src/leap/__init__.py b/src/leap/__init__.py
deleted file mode 100644
index 2adbb34a..00000000
--- a/src/leap/__init__.py
+++ /dev/null
@@ -1,36 +0,0 @@
-"""
-LEAP Encryption Access Project
-website: U{https://leap.se/}
-"""
-
-from leap import eip
-from leap import baseapp
-from leap import util
-from leap import soledad
-
-__all__ = [eip, baseapp, util, soledad]
-
-__version__ = "unknown"
-try:
- from ._version import get_versions
- __version__ = get_versions()['version']
- del get_versions
-except ImportError:
- #running on a tree that has not run
- #the setup.py setver
- pass
-
-__appname__ = "unknown"
-try:
- from leap._appname import __appname__
-except ImportError:
- #running on a tree that has not run
- #the setup.py setver
- pass
-
-__full_version__ = __appname__ + '/' + str(__version__)
-
-try:
- from leap._branding import BRANDING as __branding
-except ImportError:
- __branding = {}
diff --git a/src/leap/_version.py b/src/leap/_version.py
deleted file mode 100644
index c33430ea..00000000
--- a/src/leap/_version.py
+++ /dev/null
@@ -1,197 +0,0 @@
-
-IN_LONG_VERSION_PY = True
-# This file helps to compute a version number in source trees obtained from
-# git-archive tarball (such as those provided by githubs download-from-tag
-# feature). Distribution tarballs (build by setup.py sdist) and build
-# directories (produced by setup.py build) will contain a much shorter file
-# that just contains the computed version number.
-
-# This file is released into the public domain. Generated by
-# versioneer-0.7+ (https://github.com/warner/python-versioneer)
-
-# these strings will be replaced by git during git-archive
-git_refnames = "$Format:%d$"
-git_full = "$Format:%H$"
-
-
-import subprocess
-import sys
-
-def run_command(args, cwd=None, verbose=False):
- try:
- # remember shell=False, so use git.cmd on windows, not just git
- p = subprocess.Popen(args, stdout=subprocess.PIPE, cwd=cwd)
- except EnvironmentError:
- e = sys.exc_info()[1]
- if verbose:
- print("unable to run %s" % args[0])
- print(e)
- return None
- stdout = p.communicate()[0].strip()
- if sys.version >= '3':
- stdout = stdout.decode()
- if p.returncode != 0:
- if verbose:
- print("unable to run %s (error)" % args[0])
- return None
- return stdout
-
-
-import sys
-import re
-import os.path
-
-def get_expanded_variables(versionfile_source):
- # the code embedded in _version.py can just fetch the value of these
- # variables. When used from setup.py, we don't want to import
- # _version.py, so we do it with a regexp instead. This function is not
- # used from _version.py.
- variables = {}
- try:
- for line in open(versionfile_source,"r").readlines():
- if line.strip().startswith("git_refnames ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- variables["refnames"] = mo.group(1)
- if line.strip().startswith("git_full ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- variables["full"] = mo.group(1)
- except EnvironmentError:
- pass
- return variables
-
-def versions_from_expanded_variables(variables, tag_prefix, verbose=False):
- refnames = variables["refnames"].strip()
- if refnames.startswith("$Format"):
- if verbose:
- print("variables are unexpanded, not using")
- return {} # unexpanded, so not in an unpacked git-archive tarball
- refs = set([r.strip() for r in refnames.strip("()").split(",")])
- for ref in list(refs):
- if not re.search(r'\d', ref):
- if verbose:
- print("discarding '%s', no digits" % ref)
- refs.discard(ref)
- # Assume all version tags have a digit. git's %d expansion
- # behaves like git log --decorate=short and strips out the
- # refs/heads/ and refs/tags/ prefixes that would let us
- # distinguish between branches and tags. By ignoring refnames
- # without digits, we filter out many common branch names like
- # "release" and "stabilization", as well as "HEAD" and "master".
- if verbose:
- print("remaining refs: %s" % ",".join(sorted(refs)))
- for ref in sorted(refs):
- # sorting will prefer e.g. "2.0" over "2.0rc1"
- if ref.startswith(tag_prefix):
- r = ref[len(tag_prefix):]
- if verbose:
- print("picking %s" % r)
- return { "version": r,
- "full": variables["full"].strip() }
- # no suitable tags, so we use the full revision id
- if verbose:
- print("no suitable tags, using full revision id")
- return { "version": variables["full"].strip(),
- "full": variables["full"].strip() }
-
-def versions_from_vcs(tag_prefix, versionfile_source, verbose=False):
- # this runs 'git' from the root of the source tree. That either means
- # someone ran a setup.py command (and this code is in versioneer.py, so
- # IN_LONG_VERSION_PY=False, thus the containing directory is the root of
- # the source tree), or someone ran a project-specific entry point (and
- # this code is in _version.py, so IN_LONG_VERSION_PY=True, thus the
- # containing directory is somewhere deeper in the source tree). This only
- # gets called if the git-archive 'subst' variables were *not* expanded,
- # and _version.py hasn't already been rewritten with a short version
- # string, meaning we're inside a checked out source tree.
-
- try:
- here = os.path.abspath(__file__)
- except NameError:
- # some py2exe/bbfreeze/non-CPython implementations don't do __file__
- return {} # not always correct
-
- # versionfile_source is the relative path from the top of the source tree
- # (where the .git directory might live) to this file. Invert this to find
- # the root from __file__.
- root = here
- if IN_LONG_VERSION_PY:
- for i in range(len(versionfile_source.split("/"))):
- root = os.path.dirname(root)
- else:
- root = os.path.dirname(here)
- if not os.path.exists(os.path.join(root, ".git")):
- if verbose:
- print("no .git in %s" % root)
- return {}
-
- GIT = "git"
- if sys.platform == "win32":
- GIT = "git.cmd"
- stdout = run_command([GIT, "describe", "--tags", "--dirty", "--always"],
- cwd=root)
- if stdout is None:
- return {}
- if not stdout.startswith(tag_prefix):
- if verbose:
- print("tag '%s' doesn't start with prefix '%s'" % (stdout, tag_prefix))
- return {}
- tag = stdout[len(tag_prefix):]
- stdout = run_command([GIT, "rev-parse", "HEAD"], cwd=root)
- if stdout is None:
- return {}
- full = stdout.strip()
- if tag.endswith("-dirty"):
- full += "-dirty"
- return {"version": tag, "full": full}
-
-
-def versions_from_parentdir(parentdir_prefix, versionfile_source, verbose=False):
- if IN_LONG_VERSION_PY:
- # We're running from _version.py. If it's from a source tree
- # (execute-in-place), we can work upwards to find the root of the
- # tree, and then check the parent directory for a version string. If
- # it's in an installed application, there's no hope.
- try:
- here = os.path.abspath(__file__)
- except NameError:
- # py2exe/bbfreeze/non-CPython don't have __file__
- return {} # without __file__, we have no hope
- # versionfile_source is the relative path from the top of the source
- # tree to _version.py. Invert this to find the root from __file__.
- root = here
- for i in range(len(versionfile_source.split("/"))):
- root = os.path.dirname(root)
- else:
- # we're running from versioneer.py, which means we're running from
- # the setup.py in a source tree. sys.argv[0] is setup.py in the root.
- here = os.path.abspath(sys.argv[0])
- root = os.path.dirname(here)
-
- # Source tarballs conventionally unpack into a directory that includes
- # both the project name and a version string.
- dirname = os.path.basename(root)
- if not dirname.startswith(parentdir_prefix):
- if verbose:
- print("guessing rootdir is '%s', but '%s' doesn't start with prefix '%s'" %
- (root, dirname, parentdir_prefix))
- return None
- return {"version": dirname[len(parentdir_prefix):], "full": ""}
-
-tag_prefix = ""
-parentdir_prefix = "leap_client-"
-versionfile_source = "src/leap/_version.py"
-
-def get_versions(default={"version": "unknown", "full": ""}, verbose=False):
- variables = { "refnames": git_refnames, "full": git_full }
- ver = versions_from_expanded_variables(variables, tag_prefix, verbose)
- if not ver:
- ver = versions_from_vcs(tag_prefix, versionfile_source, verbose)
- if not ver:
- ver = versions_from_parentdir(parentdir_prefix, versionfile_source,
- verbose)
- if not ver:
- ver = default
- return ver
-
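
A small usage sketch for this versioneer boilerplate (illustrative only; the actual output depends on how the tree was obtained — expanded git-archive keywords, a git checkout, or an unpacked tarball whose directory name carries the version):

    from leap import _version

    info = _version.get_versions(verbose=True)
    # Falls back to {"version": "unknown", "full": ""} when none of the
    # three strategies (expanded keywords, git describe, parent dir name)
    # yields anything usable.
    print("%s (%s)" % (info["version"], info["full"]))
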
diff --git a/src/leap/app.py b/src/leap/app.py
deleted file mode 100644
index eb38751c..00000000
--- a/src/leap/app.py
+++ /dev/null
@@ -1,120 +0,0 @@
-# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
-from functools import partial
-import logging
-import signal
-
-# This is only needed for Python v2 but is harmless for Python v3.
-import sip
-sip.setapi('QVariant', 2)
-sip.setapi('QString', 2)
-from PyQt4.QtGui import (QApplication, QSystemTrayIcon, QMessageBox)
-from PyQt4 import QtCore
-
-from leap import __version__ as VERSION
-from leap.baseapp.mainwindow import LeapWindow
-from leap.gui import locale_rc
-
-
-def sigint_handler(*args, **kwargs):
- logger = kwargs.get('logger', None)
-    logger.debug('SIGINT caught. Shutting down...')
- mainwindow = args[0]
- mainwindow.shutdownSignal.emit()
-
-
-def main():
- """
- launches the main event loop
- long live to the (hidden) leap window!
- """
- import sys
- from leap.util import leap_argparse
- parser, opts = leap_argparse.init_leapc_args()
- debug = getattr(opts, 'debug', False)
-
- # XXX get severity from command line args
- if debug:
- level = logging.DEBUG
- else:
- level = logging.WARNING
-
- logger = logging.getLogger(name='leap')
- logger.setLevel(level)
- console = logging.StreamHandler()
- console.setLevel(level)
- formatter = logging.Formatter(
- '%(asctime)s '
- '- %(name)s - %(levelname)s - %(message)s')
- console.setFormatter(formatter)
- logger.addHandler(console)
-
- logger.info('~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~')
- logger.info('LEAP client version %s', VERSION)
- logger.info('~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~')
- logfile = getattr(opts, 'log_file', False)
- if logfile:
- logger.debug('setting logfile to %s ', logfile)
- fileh = logging.FileHandler(logfile)
- fileh.setLevel(logging.DEBUG)
- fileh.setFormatter(formatter)
- logger.addHandler(fileh)
-
- logger.info('Starting app')
- app = QApplication(sys.argv)
-
- # To test:
- # $ LANG=es ./app.py
- locale = QtCore.QLocale.system().name()
- qtTranslator = QtCore.QTranslator()
- if qtTranslator.load("qt_%s" % locale, ":/translations"):
- app.installTranslator(qtTranslator)
- appTranslator = QtCore.QTranslator()
- if appTranslator.load("leap_client_%s" % locale, ":/translations"):
- app.installTranslator(appTranslator)
-
- # needed for initializing qsettings
- # it will write .config/leap/leap.conf
- # top level app settings
- # in a platform independent way
- app.setOrganizationName("leap")
- app.setApplicationName("leap")
- app.setOrganizationDomain("leap.se")
-
- # XXX we could check here
- # if leap-client is already running, and abort
- # gracefully in that case.
-
- if not QSystemTrayIcon.isSystemTrayAvailable():
- QMessageBox.critical(None, "Systray",
-                             "I couldn't detect "
- "any system tray on this system.")
- sys.exit(1)
- if not debug:
- QApplication.setQuitOnLastWindowClosed(False)
-
- window = LeapWindow(opts)
-
- # this dummy timer ensures that
- # control is given to the outside loop, so we
- # can hook our sigint handler.
- timer = QtCore.QTimer()
- timer.start(500)
- timer.timeout.connect(lambda: None)
-
- sigint_window = partial(sigint_handler, window, logger=logger)
- signal.signal(signal.SIGINT, sigint_window)
-
- if debug:
- # we only show the main window
- # if debug mode active.
- # if not, it will be set visible
- # from the systray menu.
- window.show()
- if sys.platform == "darwin":
- window.raise_()
-
- # run main loop
- sys.exit(app.exec_())
-
-if __name__ == "__main__":
- main()
diff --git a/src/leap/base/__init__.py b/src/leap/base/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/src/leap/base/__init__.py
+++ /dev/null
diff --git a/src/leap/base/auth.py b/src/leap/base/auth.py
deleted file mode 100644
index c2d3f424..00000000
--- a/src/leap/base/auth.py
+++ /dev/null
@@ -1,355 +0,0 @@
-import binascii
-import json
-import logging
-#import urlparse
-
-import requests
-import srp
-
-from PyQt4 import QtCore
-
-from leap.base import constants as baseconstants
-from leap.crypto import leapkeyring
-from leap.util.misc import null_check
-from leap.util.web import get_https_domain_and_port
-
-logger = logging.getLogger(__name__)
-
-SIGNUP_TIMEOUT = getattr(baseconstants, 'SIGNUP_TIMEOUT', 5)
-
-"""
-Registration and authentication classes for the
-SRP auth mechanism used in the leap platform.
-
-We're using the srp library which uses a c-based implementation
-of the protocol if the c extension is available, and a python-based
-one if not.
-"""
-
-
-class SRPAuthenticationError(Exception):
- """
- exception raised
- for authentication errors
- """
-
-
-safe_unhexlify = lambda x: binascii.unhexlify(x) \
- if (len(x) % 2 == 0) else binascii.unhexlify('0' + x)
-
-
-class LeapSRPRegister(object):
-
- def __init__(self,
- schema="https",
- provider=None,
- verify=True,
- register_path="1/users",
- method="POST",
- fetcher=requests,
- srp=srp,
- hashfun=srp.SHA256,
- ng_constant=srp.NG_1024):
-
- null_check(provider, "provider")
-
- self.schema = schema
-
- domain, port = get_https_domain_and_port(provider)
- self.provider = domain
- self.port = port
-
- self.verify = verify
- self.register_path = register_path
- self.method = method
- self.fetcher = fetcher
- self.srp = srp
- self.HASHFUN = hashfun
- self.NG = ng_constant
-
- self.init_session()
-
- def init_session(self):
- self.session = self.fetcher.session()
-
- def get_registration_uri(self):
- # XXX assert is https!
- # use urlparse
- if self.port:
- uri = "%s://%s:%s/%s" % (
- self.schema,
- self.provider,
- self.port,
- self.register_path)
- else:
- uri = "%s://%s/%s" % (
- self.schema,
- self.provider,
- self.register_path)
-
- return uri
-
- def register_user(self, username, password, keep=False):
- """
- @rtype: tuple
- @rparam: (ok, request)
- """
- salt, vkey = self.srp.create_salted_verification_key(
- username,
- password,
- self.HASHFUN,
- self.NG)
-
- user_data = {
- 'user[login]': username,
- 'user[password_verifier]': binascii.hexlify(vkey),
- 'user[password_salt]': binascii.hexlify(salt)}
-
- uri = self.get_registration_uri()
- logger.debug('post to uri: %s' % uri)
-
- # XXX get self.method
- req = self.session.post(
- uri, data=user_data,
- timeout=SIGNUP_TIMEOUT,
- verify=self.verify)
- # we catch it in the form
- #req.raise_for_status()
- return (req.ok, req)
-
-
-class SRPAuth(requests.auth.AuthBase):
-
- def __init__(self, username, password, server=None, verify=None):
- # sanity check
- null_check(server, 'server')
- self.username = username
- self.password = password
- self.server = server
- self.verify = verify
-
- logger.debug('SRPAuth. verify=%s' % verify)
- logger.debug('server: %s. username=%s' % (server, username))
-
- self.init_data = None
- self.session = requests.session()
-
- self.init_srp()
-
- def init_srp(self):
- usr = srp.User(
- self.username,
- self.password,
- srp.SHA256,
- srp.NG_1024)
- uname, A = usr.start_authentication()
-
- self.srp_usr = usr
- self.A = A
-
- def get_auth_data(self):
- return {
- 'login': self.username,
- 'A': binascii.hexlify(self.A)
- }
-
- def get_init_data(self):
- try:
- init_session = self.session.post(
- self.server + '/1/sessions/',
- data=self.get_auth_data(),
- verify=self.verify)
- except requests.exceptions.ConnectionError:
- raise SRPAuthenticationError(
- "No connection made (salt).")
- except:
- raise SRPAuthenticationError(
- "Unknown error (salt).")
- if init_session.status_code not in (200, ):
- raise SRPAuthenticationError(
- "No valid response (salt).")
-
- self.init_data = init_session.json
- return self.init_data
-
- def get_server_proof_data(self):
- try:
- auth_result = self.session.put(
- #self.server + '/1/sessions.json/' + self.username,
- self.server + '/1/sessions/' + self.username,
- data={'client_auth': binascii.hexlify(self.M)},
- verify=self.verify)
- except requests.exceptions.ConnectionError:
- raise SRPAuthenticationError(
- "No connection made (HAMK).")
-
- if auth_result.status_code not in (200, ):
- raise SRPAuthenticationError(
- "No valid response (HAMK).")
-
- self.auth_data = auth_result.json
- return self.auth_data
-
- def authenticate(self):
- logger.debug('start authentication...')
-
- init_data = self.get_init_data()
- salt = init_data.get('salt', None)
- B = init_data.get('B', None)
-
- # XXX refactor this function
- # move checks and un-hex
- # to routines
-
- if not salt or not B:
- raise SRPAuthenticationError(
- "Server did not send initial data.")
-
- try:
- unhex_salt = safe_unhexlify(salt)
- except TypeError:
- raise SRPAuthenticationError(
- "Bad data from server (salt)")
- try:
- unhex_B = safe_unhexlify(B)
- except TypeError:
- raise SRPAuthenticationError(
- "Bad data from server (B)")
-
- self.M = self.srp_usr.process_challenge(
- unhex_salt,
- unhex_B
- )
-
- proof_data = self.get_server_proof_data()
-
- HAMK = proof_data.get("M2", None)
- if not HAMK:
- errors = proof_data.get('errors', None)
- if errors:
- logger.error(errors)
- raise SRPAuthenticationError("Server did not send HAMK.")
-
- try:
- unhex_HAMK = safe_unhexlify(HAMK)
- except TypeError:
- raise SRPAuthenticationError(
- "Bad data from server (HAMK)")
-
- self.srp_usr.verify_session(
- unhex_HAMK)
-
- try:
- assert self.srp_usr.authenticated()
- logger.debug('user is authenticated!')
- except (AssertionError):
- raise SRPAuthenticationError(
- "Auth verification failed.")
-
- def __call__(self, req):
- self.authenticate()
- req.cookies = self.session.cookies
- return req
-
-
-def srpauth_protected(user=None, passwd=None, server=None, verify=True):
- """
- decorator factory that accepts
- user and password keyword arguments
- and add those to the decorated request
- """
- def srpauth(fn):
- def wrapper(*args, **kwargs):
- if user and passwd:
- auth = SRPAuth(user, passwd, server, verify)
- kwargs['auth'] = auth
- kwargs['verify'] = verify
- if not args:
- logger.warning('attempting to get from empty uri!')
- return fn(*args, **kwargs)
- return wrapper
- return srpauth
-
-
-def get_leap_credentials():
- settings = QtCore.QSettings()
- full_username = settings.value('username')
- username, domain = full_username.split('@')
- seed = settings.value('%s_seed' % domain, None)
- password = leapkeyring.leap_get_password(full_username, seed=seed)
- return (username, password)
-
-
-# XXX TODO
-# Pass verify as single argument,
-# in srpauth_protected style
-
-def magick_srpauth(fn):
- """
- decorator that gets user and password
- from the config file and adds those to
- the decorated request
- """
- logger.debug('magick srp auth decorator called')
-
- def wrapper(*args, **kwargs):
- #uri = args[0]
- # XXX Ugh!
- # Problem with this approach.
- # This won't work when we're using
- # api.foo.bar
- # Unless we keep a table with the
- # equivalencies...
- user, passwd = get_leap_credentials()
-
- # XXX pass verify and server too
- # (pop)
- auth = SRPAuth(user, passwd)
- kwargs['auth'] = auth
- return fn(*args, **kwargs)
- return wrapper
-
-
-if __name__ == "__main__":
- """
- To test against test_provider (twisted version)
-    Register a user: (will be valid during the session)
- >>> python auth.py add test password
-
- Test login with that user:
- >>> python auth.py login test password
- """
-
- import sys
-
- if len(sys.argv) not in (4, 5):
- print 'Usage: auth <add|login> <user> <pass> [server]'
- sys.exit(0)
-
- action = sys.argv[1]
- user = sys.argv[2]
- passwd = sys.argv[3]
-
- if len(sys.argv) == 5:
- SERVER = sys.argv[4]
- else:
- SERVER = "https://localhost:8443"
-
- if action == "login":
-
- @srpauth_protected(
- user=user, passwd=passwd, server=SERVER, verify=False)
- def test_srp_protected_get(*args, **kwargs):
- req = requests.get(*args, **kwargs)
- req.raise_for_status
- return req
-
- #req = test_srp_protected_get('https://localhost:8443/1/cert')
- req = test_srp_protected_get('%s/1/cert' % SERVER)
- #print 'cert :', req.content[:200] + "..."
- print req.content
- sys.exit(0)
-
- if action == "add":
- auth = LeapSRPRegister(provider=SERVER, verify=False)
- auth.register_user(user, passwd)
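
The __main__ block above doubles as a usage example; in library form the same flow looks roughly like this (a sketch under the same assumptions as the docstring — a test provider exposing the 1/users and 1/sessions endpoints on https://localhost:8443):

    import requests
    from leap.base.auth import LeapSRPRegister, SRPAuth, SRPAuthenticationError

    SERVER = "https://localhost:8443"

    # 1) Register: POSTs the SRP salt and verifier to <server>/1/users.
    register = LeapSRPRegister(provider=SERVER, verify=False)
    ok, response = register.register_user("test", "password")

    # 2) Authenticate and reuse the session cookies on a protected resource.
    try:
        auth = SRPAuth("test", "password", server=SERVER, verify=False)
        req = requests.get("%s/1/cert" % SERVER, auth=auth, verify=False)
        req.raise_for_status()
    except SRPAuthenticationError as exc:
        print("SRP handshake failed: %s" % exc)
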
diff --git a/src/leap/base/authentication.py b/src/leap/base/authentication.py
deleted file mode 100644
index 09ff1d07..00000000
--- a/src/leap/base/authentication.py
+++ /dev/null
@@ -1,11 +0,0 @@
-"""
-Authentication Base Class
-"""
-
-
-class Authentication(object):
- """
- I have no idea how Authentication (certs,?)
- will be done, but stub it here.
- """
- pass
diff --git a/src/leap/base/checks.py b/src/leap/base/checks.py
deleted file mode 100644
index 0bf44f59..00000000
--- a/src/leap/base/checks.py
+++ /dev/null
@@ -1,213 +0,0 @@
-# -*- coding: utf-8 -*-
-import logging
-import platform
-import re
-import socket
-
-import netifaces
-import sh
-
-from leap.base import constants
-from leap.base import exceptions
-
-logger = logging.getLogger(name=__name__)
-_platform = platform.system()
-
-#EVENTS OF NOTE
-EVENT_CONNECT_REFUSED = "[ECONNREFUSED]: Connection refused (code=111)"
-
-ICMP_TARGET = "8.8.8.8"
-
-
-class LeapNetworkChecker(object):
- """
- all network related checks
- """
- def __init__(self, *args, **kwargs):
- provider_gw = kwargs.pop('provider_gw', None)
- self.provider_gateway = provider_gw
-
- def run_all(self, checker=None):
- if not checker:
- checker = self
- #self.error = None # ?
-
- # for MVS
- checker.check_tunnel_default_interface()
- checker.check_internet_connection()
- checker.is_internet_up()
-
- if self.provider_gateway:
- checker.ping_gateway(self.provider_gateway)
-
- checker.parse_log_and_react([], ())
-
- def check_internet_connection(self):
- if _platform == "Linux":
- try:
- output = sh.ping("-c", "5", "-w", "5", ICMP_TARGET)
- # XXX should redirect this to netcheck logger.
- # and don't clutter main log.
- logger.debug('Network appears to be up.')
- except sh.ErrorReturnCode_1 as e:
- packet_loss = re.findall("\d+% packet loss", e.message)[0]
- logger.debug("Unidentified Connection Error: " + packet_loss)
- if not self.is_internet_up():
- error = "No valid internet connection found."
- else:
- error = "Provider server appears to be down."
-
- logger.error(error)
- raise exceptions.NoInternetConnection(error)
-
- else:
- raise NotImplementedError
-
- def is_internet_up(self):
- iface, gateway = self.get_default_interface_gateway()
- try:
- self.ping_gateway(self.provider_gateway)
- except exceptions.NoConnectionToGateway:
- return False
- return True
-
- def _get_route_table_linux(self):
- # do not use context manager, tests pass a StringIO
- f = open("/proc/net/route")
- route_table = f.readlines()
- f.close()
- #toss out header
- route_table.pop(0)
- if not route_table:
- raise exceptions.NoDefaultInterfaceFoundError
- return route_table
-
- def _get_def_iface_osx(self):
- default_iface = None
- #gateway = None
- routes = list(sh.route('-n', 'get', ICMP_TARGET, _iter=True))
- iface = filter(lambda l: "interface" in l, routes)
- if not iface:
- return None, None
- def_ifacel = re.findall('\w+\d', iface[0])
- default_iface = def_ifacel[0] if def_ifacel else None
- if not default_iface:
- return None, None
- _gw = filter(lambda l: "gateway" in l, routes)
- gw = re.findall('\d+\.\d+\.\d+\.\d+', _gw[0])[0]
- return default_iface, gw
-
- def _get_tunnel_iface_linux(self):
- # XXX review.
- # valid also when local router has a default entry?
- route_table = self._get_route_table_linux()
- line = route_table.pop(0)
- iface, destination = line.split('\t')[0:2]
- if not destination == '00000000' or not iface == 'tun0':
- raise exceptions.TunnelNotDefaultRouteError()
- return True
-
- def check_tunnel_default_interface(self):
- """
- Raises a TunnelNotDefaultRouteError
- if tun0 is not the chosen default route
- (including when no routes are present)
- """
- #logger.debug('checking tunnel default interface...')
-
- if _platform == "Linux":
- valid = self._get_tunnel_iface_linux()
- return valid
- elif _platform == "Darwin":
- default_iface, gw = self._get_def_iface_osx()
- #logger.debug('iface: %s', default_iface)
- if default_iface != "tun0":
- logger.debug('tunnel not default route! default iface: %s', default_iface)
- # XXX should catch this and act accordingly...
- # but rather, this test should only be launched
- # when we have successfully completed a connection
- # ... TRIGGER: Connection established (or whatever it is)
- # in the logs
- raise exceptions.TunnelNotDefaultRouteError
- else:
- #logger.debug('PLATFORM !!! %s', _platform)
- raise NotImplementedError
-
- def _get_def_iface_linux(self):
- default_iface = None
- gateway = None
-
- route_table = self._get_route_table_linux()
- while route_table:
- line = route_table.pop(0)
- iface, destination, gateway = line.split('\t')[0:3]
- if destination == '00000000':
- default_iface = iface
- break
- return default_iface, gateway
-
- def get_default_interface_gateway(self):
- """
- gets the interface we are going through.
- (this should be merged with check tunnel default interface,
- imo...)
- """
- if _platform == "Linux":
- default_iface, gw = self._get_def_iface_linux()
- elif _platform == "Darwin":
- default_iface, gw = self._get_def_iface_osx()
- else:
- raise NotImplementedError
-
- if not default_iface:
- raise exceptions.NoDefaultInterfaceFoundError
-
- if default_iface not in netifaces.interfaces():
- raise exceptions.InterfaceNotFoundError
- logger.debug('-- default iface %s', default_iface)
- return default_iface, gw
-
- def ping_gateway(self, gateway):
- # TODO: Discuss how much packet loss (%) is acceptable.
-
- # XXX -- validate gateway
- # -- is it a valid ip? (there's something in util)
- # -- is it a domain?
- # -- can we resolve? -- raise NoDNSError if not.
-
- # XXX -- sh.ping implementation needs review!
- try:
- output = sh.ping("-c", "10", gateway).stdout
- except sh.ErrorReturnCode_1 as e:
- output = e.message
- finally:
- packet_loss = int(re.findall("(\d+)% packet loss", output)[0])
-
- logger.debug('packet loss %s%%' % packet_loss)
- if packet_loss > constants.MAX_ICMP_PACKET_LOSS:
- raise exceptions.NoConnectionToGateway
-
- def check_name_resolution(self, domain_name):
- try:
- socket.gethostbyname(domain_name)
- return True
- except socket.gaierror:
- raise exceptions.CannotResolveDomainError
-
- def parse_log_and_react(self, log, error_matrix=None):
- """
- compares the recent openvpn status log to
- strings passed in and executes the callbacks passed in.
- @param log: openvpn log
- @type log: list of strings
- @param error_matrix: tuples of strings and tuples of callbacks
- @type error_matrix: tuples strings and call backs
- """
- for line in log:
- # we could compile a regex here to save some cycles up -- kali
- for each in error_matrix:
- error, callbacks = each
- if error in line:
- for cb in callbacks:
- if callable(cb):
- cb()
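
As an illustration of the class removed above, this is roughly how LeapNetworkChecker was driven; the gateway address and log lines are placeholders, and failed checks surface as leap.base.exceptions subclasses.

    from leap.base.checks import LeapNetworkChecker, EVENT_CONNECT_REFUSED

    # run the whole battery of checks against a known provider gateway
    checker = LeapNetworkChecker(provider_gw="1.2.3.4")
    checker.run_all()

    # react to events found in a recent slice of the openvpn log
    def on_connect_refused():
        pass  # e.g. tell the UI the provider refused the connection

    recent_log = []  # recent openvpn log lines, supplied by the caller
    error_matrix = [(EVENT_CONNECT_REFUSED, (on_connect_refused,))]
    checker.parse_log_and_react(recent_log, error_matrix)
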
diff --git a/src/leap/base/config.py b/src/leap/base/config.py
deleted file mode 100644
index d796bcf1..00000000
--- a/src/leap/base/config.py
+++ /dev/null
@@ -1,348 +0,0 @@
-"""
-Configuration Base Class
-"""
-import grp
-import json
-import logging
-import re
-import socket
-import time
-import os
-
-logger = logging.getLogger(name=__name__)
-
-from dateutil import parser as dateparser
-from xdg import BaseDirectory
-import requests
-
-from leap.base import exceptions
-from leap.base import constants
-from leap.base.pluggableconfig import PluggableConfig
-from leap.util.fileutil import (mkdir_p)
-
-# move to base!
-from leap.eip import exceptions as eipexceptions
-
-
-class BaseLeapConfig(object):
- slug = None
-
- # XXX we have to enforce that every derived class
- # has a slug (via interface)
- # get property getter that raises NI..
-
- def save(self):
- raise NotImplementedError("abstract base class")
-
- def load(self):
- raise NotImplementedError("abstract base class")
-
- def get_config(self, *kwargs):
- raise NotImplementedError("abstract base class")
-
- @property
- def config(self):
- return self.get_config()
-
- def get_value(self, *kwargs):
- raise NotImplementedError("abstract base class")
-
-
-class MetaConfigWithSpec(type):
- """
- metaclass for JSONLeapConfig classes.
- It creates a configuration spec out of
- the `spec` dictionary. The `properties` attribute
- of the spec dict is turned into the `schema` attribute
- of the new class (which will be used to validate against).
- """
- # XXX in the near future, this is the
- # place where we want to enforce
- # singletons, read-only and similar stuff.
-
- def __new__(meta, classname, bases, classDict):
- schema_obj = classDict.get('spec', None)
-
- # not quite happy with this workaround.
- # I want to raise if missing spec dict, but only
- # for grand-children of this metaclass.
- # maybe should use abc module for this.
- abcderived = ("JSONLeapConfig",)
- if schema_obj is None and classname not in abcderived:
- raise exceptions.ImproperlyConfigured(
- "missing spec dict on your derived class (%s)" % classname)
-
- # we create a configuration spec attribute
- # from the spec dict
- config_class = type(
- classname + "Spec",
- (PluggableConfig, object),
- {'options': schema_obj})
- classDict['spec'] = config_class
-
- return type.__new__(meta, classname, bases, classDict)
-
-##########################################################
-# some hacking still in progress:
-
-# Configs have:
-
-# - a slug (from where a filename/folder is derived)
-# - a spec (for validation and defaults).
-# this spec is conformant to the json-schema.
-# basically a dict that will be used
-# for type casting and validation, and defaults settings.
-
-# all config objects, since they are derived from BaseConfig, implement basic
-# useful methods:
-# - save
-# - load
-
-##########################################################
-
-
-class JSONLeapConfig(BaseLeapConfig):
-
- __metaclass__ = MetaConfigWithSpec
-
- def __init__(self, *args, **kwargs):
- # sanity check
- try:
- assert self.slug is not None
- except AssertionError:
- raise exceptions.ImproperlyConfigured(
- "missing slug on JSONLeapConfig"
- " derived class")
- try:
- assert self.spec is not None
- except AssertionError:
- raise exceptions.ImproperlyConfigured(
- "missing spec on JSONLeapConfig"
- " derived class")
- assert issubclass(self.spec, PluggableConfig)
-
- self.domain = kwargs.pop('domain', None)
- self._config = self.spec(format="json")
- self._config.load()
- self.fetcher = kwargs.pop('fetcher', requests)
-
- # mandatory baseconfig interface
-
- def save(self, to=None, force=False):
- """
- force param will skip the dirty check.
- :type force: bool
- """
- # XXX this force=True does not feel to right
- # but still have to look for a better way
- # of dealing with dirtiness and the
- # trick of loading remote config only
- # when newer.
-
- if force:
- do_save = True
- else:
- do_save = self._config.is_dirty()
-
- if do_save:
- if to is None:
- to = self.filename
- folder, filename = os.path.split(to)
- if folder and not os.path.isdir(folder):
- mkdir_p(folder)
- self._config.serialize(to)
- return True
-
- else:
- return False
-
- def load(self, fromfile=None, from_uri=None, fetcher=None,
- force_download=False, verify=True):
-
- if from_uri is not None:
- fetched = self.fetch(
- from_uri,
- fetcher=fetcher,
- verify=verify,
- force_dl=force_download)
- if fetched:
- return
- if fromfile is None:
- fromfile = self.filename
- if os.path.isfile(fromfile):
- self._config.load(fromfile=fromfile)
- else:
- logger.error('tried to load config from non-existent path')
- logger.error('Not Found: %s', fromfile)
-
- def fetch(self, uri, fetcher=None, verify=True, force_dl=False):
- if not fetcher:
- fetcher = self.fetcher
-
- logger.debug('uri: %s (verify: %s)' % (uri, verify))
-
- rargs = (uri, )
- rkwargs = {'verify': verify}
- headers = {}
-
- curmtime = self.get_mtime() if not force_dl else None
- if curmtime:
- logger.debug('requesting with if-modified-since %s' % curmtime)
- headers['if-modified-since'] = curmtime
- rkwargs['headers'] = headers
-
- #request = fetcher.get(uri, verify=verify)
- request = fetcher.get(*rargs, **rkwargs)
- request.raise_for_status()
-
- if request.status_code == 304:
- logger.debug('...304 Not Changed')
- # On this point, we have to assume that
- # we HAD the filename. If that filename is corrupt,
- # we should enforce a force_download in the load
- # method above.
- self._config.load(fromfile=self.filename)
- return True
-
- if request.json:
- mtime = None
- last_modified = request.headers.get('last-modified', None)
- if last_modified:
- _mtime = dateparser.parse(last_modified)
- mtime = int(_mtime.strftime("%s"))
- if callable(request.json):
- _json = request.json()
- else:
- # back-compat
- _json = request.json
- self._config.load(json.dumps(_json), mtime=mtime)
- self._config.set_dirty()
- else:
- # not request.json
- # might be server did not announce content properly,
- # let's try deserializing all the same.
- try:
- self._config.load(request.content)
- self._config.set_dirty()
- except ValueError:
- raise eipexceptions.LeapBadConfigFetchedError
-
- return True
-
- def get_mtime(self):
- try:
- _mtime = os.stat(self.filename)[8]
- mtime = time.strftime("%c GMT", time.gmtime(_mtime))
- return mtime
- except OSError:
- return None
-
- def get_config(self):
- return self._config.config
-
- # public methods
-
- def get_filename(self):
- return self._slug_to_filename()
-
- @property
- def filename(self):
- return self.get_filename()
-
- def validate(self, data):
- logger.debug('validating schema')
- self._config.validate(data)
- return True
-
- # private
-
- def _slug_to_filename(self):
- # is this going to work in winland if slug is "foo/bar" ?
- folder, filename = os.path.split(self.slug)
- config_file = get_config_file(filename, folder)
- return config_file
-
- def exists(self):
- return os.path.isfile(self.filename)
-
-
-#
-# utility functions
-#
-# (might be moved to some class as we see fit, but
-# let's remain functional for a while)
-# maybe base.config.util ??
-#
-
-
-def get_config_dir():
- """
- get the base dir for all leap config
- @rparam: config path
- @rtype: string
- """
- home = os.path.expanduser("~")
- if re.findall("leap_tests-[_a-zA-Z0-9]{6}", home):
- # we're inside a test! :)
- return os.path.join(home, ".config/leap")
- else:
- # XXX dirspec is cross-platform,
- # we should borrow some of those
- # routines for osx/win and wrap this call.
- return os.path.join(BaseDirectory.xdg_config_home,
- 'leap')
-
-
-def get_config_file(filename, folder=None):
- """
- concatenates the given filename
- with leap config dir.
- @param filename: name of the file
- @type filename: string
- @rparam: full path to config file
- """
- path = []
- path.append(get_config_dir())
- if folder is not None:
- path.append(folder)
- path.append(filename)
- return os.path.join(*path)
-
-
-def get_default_provider_path():
- default_subpath = os.path.join("providers",
- constants.DEFAULT_PROVIDER)
- default_provider_path = get_config_file(
- '',
- folder=default_subpath)
- return default_provider_path
-
-
-def get_provider_path(domain):
- # XXX if not domain, return get_default_provider_path
- default_subpath = os.path.join("providers", domain)
- provider_path = get_config_file(
- '',
- folder=default_subpath)
- return provider_path
-
-
-def validate_ip(ip_str):
- """
- raises an exception if the ip_str is
- not a valid representation of an ip
- """
- socket.inet_aton(ip_str)
-
-
-def get_username():
- try:
- return os.getlogin()
- except OSError as e:
- import pwd
- return pwd.getpwuid(os.getuid())[0]
-
-
-def get_groupname():
- gid = os.getgroups()[-1]
- return grp.getgrgid(gid).gr_name
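
To make the metaclass/spec machinery above concrete, here is a hedged sketch of a JSONLeapConfig subclass; the spec and slug are invented placeholders, while the save/load calls mirror the methods being deleted.

    from leap.base.config import JSONLeapConfig, get_config_file

    class ExampleConfig(JSONLeapConfig):
        # MetaConfigWithSpec turns this dict into a PluggableConfig spec class
        spec = {'properties': {
            'api_uri': {'type': unicode,
                        'default': u'https://api.example.org/'}}}
        slug = get_config_file('example.json', folder='providers/example.org')

    cfg = ExampleConfig(domain='example.org')
    cfg.save(force=True)  # skip the dirty check and serialize the defaults to disk
    cfg.load()            # read it back (or refresh with load(from_uri=..., verify=True))
    api_uri = cfg.config['api_uri']
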
diff --git a/src/leap/base/connection.py b/src/leap/base/connection.py
deleted file mode 100644
index 41d13935..00000000
--- a/src/leap/base/connection.py
+++ /dev/null
@@ -1,115 +0,0 @@
-"""
-Base Connection Class
-"""
-from __future__ import (division, unicode_literals, print_function)
-
-import logging
-
-from leap.base.authentication import Authentication
-
-logger = logging.getLogger(name=__name__)
-
-
-class Connection(Authentication):
- # JSONLeapConfig
- #spec = {}
-
- def __init__(self, *args, **kwargs):
- self.connection_state = None
- self.desired_connection_state = None
- #XXX FIXME diamond inheritance gotcha..
- #If you inherit from >1 class,
- #super is only initializing one
- #of the bases..!!
- # I think we better pass config as a constructor
- # parameter -- kali 2012-08-30 04:33
- super(Connection, self).__init__(*args, **kwargs)
-
- def connect(self):
- """
- entry point for connection process
- """
- pass
-
- def disconnect(self):
- """
- disconnects client
- """
- pass
-
- #def shutdown(self):
- #"""
- #shutdown and quit
- #"""
- #self.desired_con_state = self.status.DISCONNECTED
-
- def connection_state(self):
- """
- returns the current connection state
- """
- return self.status.current
-
- def desired_connection_state(self):
- """
- returns the desired connection state
- """
- return self.desired_connection_state
-
- def get_icon_name(self):
- """
- get icon name from status object
- """
- return self.status.get_state_icon()
-
- #
- # private methods
- #
-
- def _disconnect(self):
- """
- private method for disconnecting
- """
- if self.subp is not None:
- self.subp.terminate()
- self.subp = None
- # XXX signal state changes! :)
-
- def _is_alive(self):
- """
- don't know yet
- """
- pass
-
- def _connect(self):
- """
- entry point for connection cascade methods.
- """
- #conn_result = ConState.DISCONNECTED
- try:
- conn_result = self._try_connection()
- except UnrecoverableError as except_msg:
- logger.error("FATAL: %s" % unicode(except_msg))
- conn_result = self.status.UNRECOVERABLE
- except Exception as except_msg:
- self.error_queue.append(except_msg)
- logger.error("Failed Connection: %s" %
- unicode(except_msg))
- return conn_result
-
-
-class ConnectionError(Exception):
- """
- generic connection error
- """
- def __str__(self):
- if len(self.args) >= 1:
- return repr(self.args[0])
- else:
- raise self()
-
-
-class UnrecoverableError(ConnectionError):
- """
- we cannot do anything about it, sorry
- """
- pass
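
The base class above leans on attributes and hooks that concrete subclasses are expected to provide (status, error_queue, subp, _try_connection); a hedged sketch of such a subclass, with placeholder state:

    from leap.base.connection import Connection, UnrecoverableError

    class ExampleConnection(Connection):
        def __init__(self, *args, **kwargs):
            self.status = None       # placeholder for a state object (.current, .UNRECOVERABLE, ...)
            self.error_queue = []    # exceptions collected by _connect
            self.subp = None         # handle to the transport subprocess, if any
            super(ExampleConnection, self).__init__(*args, **kwargs)

        def _try_connection(self):
            # launch the actual transport here; return a state constant on success,
            # raise UnrecoverableError for conditions we cannot recover from
            return "CONNECTED"

        def connect(self):
            return self._connect()

        def disconnect(self):
            self._disconnect()
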
diff --git a/src/leap/base/constants.py b/src/leap/base/constants.py
deleted file mode 100644
index f5665e5f..00000000
--- a/src/leap/base/constants.py
+++ /dev/null
@@ -1,42 +0,0 @@
-"""constants to be used in base module"""
-from leap import __branding
-APP_NAME = __branding.get("short_name", "leap-client")
-OPENVPN_BIN = "openvpn"
-
-# default provider placeholder
-# using `example.org` we make sure that this
-# is not going to be resolved during the tests phases
-# (we expect testers to add it to their /etc/hosts)
-
-DEFAULT_PROVIDER = __branding.get(
- "provider_domain",
- "testprovider.example.org")
-
-DEFINITION_EXPECTED_PATH = "provider.json"
-
-DEFAULT_PROVIDER_DEFINITION = {
- u"api_uri": "https://api.%s/" % DEFAULT_PROVIDER,
- u"api_version": u"1",
- u"ca_cert_fingerprint": "SHA256: fff",
- u"ca_cert_uri": u"https://%s/ca.crt" % DEFAULT_PROVIDER,
- u"default_language": u"en",
- u"description": {
- u"en": u"A demonstration service provider using the LEAP platform"
- },
- u"domain": "%s" % DEFAULT_PROVIDER,
- u"enrollment_policy": u"open",
- u"languages": [
- u"en"
- ],
- u"name": {
- u"en": u"Test Provider"
- },
- u"services": [
- "openvpn"
- ]
-}
-
-
-MAX_ICMP_PACKET_LOSS = 10
-
-ROUTE_CHECK_INTERVAL = 10
diff --git a/src/leap/base/exceptions.py b/src/leap/base/exceptions.py
deleted file mode 100644
index 2e31b33b..00000000
--- a/src/leap/base/exceptions.py
+++ /dev/null
@@ -1,97 +0,0 @@
-"""
-Exception attributes and their meaning/uses
--------------------------------------------
-
-* critical: if True, will abort execution prematurely,
- after attempting any cleaning
- action.
-
-* failfirst: breaks any error_check loop that is examining
- the error queue.
-
-* message: the message that will be used in the __repr__ of the exception.
-
-* usermessage: the message that will be passed to user in ErrorDialogs
- in Qt-land.
-"""
-from leap.util.translations import translate
-
-
-class LeapException(Exception):
- """
- base LeapClient exception
- sets some parameters that we will check
- during error checking routines
- """
-
- critical = False
- failfirst = False
- warning = False
-
-
-class CriticalError(LeapException):
- """
- we cannot do anything about it
- """
- critical = True
- failfirst = True
-
-
-# In use ???
-# don't think so. purge if not...
-
-class MissingConfigFileError(Exception):
- pass
-
-
-class ImproperlyConfigured(Exception):
- pass
-
-
-# NOTE: "Errors" (context) has to be a explicit string!
-
-
-class InterfaceNotFoundError(LeapException):
- # XXX should take iface arg on init maybe?
- message = "interface not found"
- usermessage = translate(
- "Errors",
- "Interface not found")
-
-
-class NoDefaultInterfaceFoundError(LeapException):
- message = "no default interface found"
- usermessage = translate(
- "Errors",
- "Looks like your computer "
- "is not connected to the internet")
-
-
-class NoConnectionToGateway(CriticalError):
- message = "no connection to gateway"
- usermessage = translate(
- "Errors",
- "Looks like there are problems "
- "with your internet connection")
-
-
-class NoInternetConnection(CriticalError):
- message = "No Internet connection found"
- usermessage = translate(
- "Errors",
- "It looks like there is no internet connection.")
- # and now we try to connect to our web to troubleshoot LOL :P
-
-
-class CannotResolveDomainError(LeapException):
- message = "Cannot resolve domain"
- usermessage = translate(
- "Errors",
- "Domain cannot be found")
-
-
-class TunnelNotDefaultRouteError(LeapException):
- message = "Tunnel connection dissapeared. VPN down?"
- usermessage = translate(
- "Errors",
- "The Encrypted Connection was lost.")
diff --git a/src/leap/base/network.py b/src/leap/base/network.py
deleted file mode 100644
index d841e692..00000000
--- a/src/leap/base/network.py
+++ /dev/null
@@ -1,107 +0,0 @@
-# -*- coding: utf-8 -*-
-from __future__ import (print_function)
-import logging
-import threading
-
-from leap.eip import config as eipconfig
-from leap.base.checks import LeapNetworkChecker
-from leap.base.constants import ROUTE_CHECK_INTERVAL
-from leap.base.exceptions import TunnelNotDefaultRouteError
-from leap.util.misc import null_check
-from leap.util.coroutines import (launch_thread, process_events)
-
-from time import sleep
-
-logger = logging.getLogger(name=__name__)
-
-
-class NetworkCheckerThread(object):
- """
- Manages the network checking thread that makes sure we have a working network
- connection.
- """
- def __init__(self, *args, **kwargs):
-
- self.status_signals = kwargs.pop('status_signals', None)
- self.error_cb = kwargs.pop(
- 'error_cb',
- lambda exc: logger.error("%s", exc.message))
- self.shutdown = threading.Event()
-
- # XXX get provider passed here
- provider = kwargs.pop('provider', None)
- null_check(provider, 'provider')
-
- eipconf = eipconfig.EIPConfig(domain=provider)
- eipconf.load()
- eipserviceconf = eipconfig.EIPServiceConfig(domain=provider)
- eipserviceconf.load()
-
- gw = eipconfig.get_eip_gateway(
- eipconfig=eipconf,
- eipserviceconfig=eipserviceconf)
- self.checker = LeapNetworkChecker(
- provider_gw=gw)
-
- def start(self):
- self.process_handle = self._launch_recurrent_network_checks(
- (self.error_cb,))
-
- def stop(self):
- self.process_handle.join(timeout=0.1)
- self.shutdown.set()
- logger.debug("network checked stopped.")
-
- def run_checks(self):
- pass
-
- #private methods
-
- #here all the observers in fail_callbacks expect one positional argument
- #(the exception), so we can check that it works by passing a lambda
- #that logs the exception.
-
- def _network_checks_thread(self, fail_callbacks):
- #TODO: replace this with waiting for a signal from openvpn
- while True:
- try:
- self.checker.check_tunnel_default_interface()
- break
- except TunnelNotDefaultRouteError:
- # XXX ??? why do we sleep here???
- # aa: If the openvpn isn't up and running yet,
- # let's give it a moment to breathe.
- #logger.error('NOT DEFAULT ROUTE!----')
- # Instead of this, we should flag when the
- # iface IS SUPPOSED to be up imo. -- kali
- sleep(1)
-
- fail_observer_dict = dict(((
- observer,
- process_events(observer)) for observer in fail_callbacks))
-
- while not self.shutdown.is_set():
- try:
- self.checker.check_tunnel_default_interface()
- self.checker.check_internet_connection()
- sleep(ROUTE_CHECK_INTERVAL)
- except Exception as exc:
- for obs in fail_observer_dict:
- fail_observer_dict[obs].send(exc)
- sleep(ROUTE_CHECK_INTERVAL)
-
- #reset event
- # I see a problem with this. You cannot stop it, it
- # resets itself forever. -- kali
-
- # XXX use QTimer for the recurrent triggers,
- # and ditch the sleeps.
- logger.debug('resetting event')
- self.shutdown.clear()
-
- def _launch_recurrent_network_checks(self, fail_callbacks):
- # XXX reimplement using QTimer -- kali
- watcher = launch_thread(
- self._network_checks_thread,
- (fail_callbacks,))
- return watcher
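
For reference, a minimal sketch of how the NetworkCheckerThread removed here was wired up; the provider domain is a placeholder, and the constructor expects that provider's eip config to already be on disk.

    import logging

    from leap.base.network import NetworkCheckerThread

    logger = logging.getLogger(__name__)

    watcher = NetworkCheckerThread(
        provider="testprovider.example.org",
        error_cb=lambda exc: logger.error("network check failed: %s", exc))
    watcher.start()   # recurrent checks, one every ROUTE_CHECK_INTERVAL seconds
    # ... later, when tearing the connection down:
    watcher.stop()
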
diff --git a/src/leap/base/pluggableconfig.py b/src/leap/base/pluggableconfig.py
deleted file mode 100644
index 3517db6b..00000000
--- a/src/leap/base/pluggableconfig.py
+++ /dev/null
@@ -1,455 +0,0 @@
-"""
-generic configuration handlers
-"""
-import copy
-import json
-import logging
-import os
-import time
-import urlparse
-
-import jsonschema
-
-from leap.util.translations import LEAPTranslatable
-
-logger = logging.getLogger(__name__)
-
-
-__all__ = ['PluggableConfig',
- 'adaptors',
- 'types',
- 'UnknownOptionException',
- 'MissingValueException',
- 'ConfigurationProviderException',
- 'TypeCastException']
-
-# exceptions
-
-
-class UnknownOptionException(Exception):
- """exception raised when a non-configuration
- value is present in the configuration"""
-
-
-class MissingValueException(Exception):
- """exception raised when a required value is missing"""
-
-
-class ConfigurationProviderException(Exception):
- """exception raised when a configuration provider is missing, etc"""
-
-
-class TypeCastException(Exception):
- """exception raised when a
- configuration item cannot be coerced to a type"""
-
-
-class ConfigAdaptor(object):
- """
- abstract base class for config adaptors for
- serialization/deserialization and custom validation
- and type casting.
- """
- def read(self, filename):
- raise NotImplementedError("abstract base class")
-
- def write(self, config, filename):
- with open(filename, 'w') as f:
- self._write(f, config)
-
- def _write(self, fp, config):
- raise NotImplementedError("abstract base class")
-
- def validate(self, config, schema):
- raise NotImplementedError("abstract base class")
-
-
-adaptors = {}
-
-
-class JSONSchemaEncoder(json.JSONEncoder):
- """
- custom default encoder that
- casts python objects to json objects for
- the schema validation
- """
- def default(self, obj):
- if obj is str:
- return 'string'
- if obj is unicode:
- return 'string'
- if obj is int:
- return 'integer'
- if obj is list:
- return 'array'
- if obj is dict:
- return 'object'
- if obj is bool:
- return 'boolean'
-
-
-class JSONAdaptor(ConfigAdaptor):
- indent = 2
- extensions = ['json']
-
- def read(self, _from):
- if isinstance(_from, file):
- _from_string = _from.read()
- if isinstance(_from, str):
- _from_string = _from
- return json.loads(_from_string)
-
- def _write(self, fp, config):
- fp.write(json.dumps(config,
- indent=self.indent,
- sort_keys=True))
-
- def validate(self, config, schema_obj):
- schema_json = JSONSchemaEncoder().encode(schema_obj)
- schema = json.loads(schema_json)
- jsonschema.validate(config, schema)
-
-
-adaptors['json'] = JSONAdaptor()
-
-#
-# Adaptors
-#
-# Allow to apply a predefined set of types to the
-# specs, so it checks the validity of formats and cast it
-# to proper python types.
-
-# TODO:
-# - HTTPS uri
-
-
-class DateType(object):
- fmt = '%Y-%m-%d'
-
- def to_python(self, data):
- return time.strptime(data, self.fmt)
-
- def get_prep_value(self, data):
- return time.strftime(self.fmt, data)
-
-
-class TranslatableType(object):
- """
- a type that casts to LEAPTranslatable objects.
- Used for labels we get from providers and stuff.
- """
-
- def to_python(self, data):
- return LEAPTranslatable(data)
-
- # needed? we already have an extended dict...
- #def get_prep_value(self, data):
- #return dict(data)
-
-
-class URIType(object):
-
- def to_python(self, data):
- parsed = urlparse.urlparse(data)
- if not parsed.scheme:
- raise TypeCastException("uri %s has no schema" % data)
- return parsed
-
- def get_prep_value(self, data):
- return data.geturl()
-
-
-class HTTPSURIType(object):
-
- def to_python(self, data):
- parsed = urlparse.urlparse(data)
- if not parsed.scheme:
- raise TypeCastException("uri %s has no schema" % data)
- if parsed.scheme != "https":
- raise TypeCastException(
- "uri %s does not has "
- "https schema" % data)
- return parsed
-
- def get_prep_value(self, data):
- return data.geturl()
-
-
-types = {
- 'date': DateType(),
- 'uri': URIType(),
- 'https-uri': HTTPSURIType(),
- 'translatable': TranslatableType(),
-}
-
-
-class PluggableConfig(object):
-
- options = {}
-
- def __init__(self,
- adaptors=adaptors,
- types=types,
- format=None):
-
- self.config = {}
- self.adaptors = adaptors
- self.types = types
- self._format = format
- self.mtime = None
- self.dirty = False
-
- @property
- def option_dict(self):
- if hasattr(self, 'options') and isinstance(self.options, dict):
- return self.options.get('properties', None)
-
- def items(self):
- """
- act like an iterator
- """
- if isinstance(self.option_dict, dict):
- return self.option_dict.items()
- return self.options
-
- def validate(self, config, format=None):
- """
- validate config
- """
- schema = self.options
- if format is None:
- format = self._format
-
- if format:
- adaptor = self.get_adaptor(self._format)
- adaptor.validate(config, schema)
- else:
- # we really should make format mandatory...
- logger.error('no format passed to validate')
-
- # first round of validation is ok.
- # now we proceed to cast types if any specified.
- self.to_python(config)
-
- def to_python(self, config):
- """
- cast types, applying first the type and then the format indications.
- """
- unseen_options = [i for i in config if i not in self.option_dict]
- if unseen_options:
- raise UnknownOptionException(
- "Unknown options: %s" % ', '.join(unseen_options))
-
- for key, value in config.items():
- _type = self.option_dict[key].get('type')
- if _type is None and 'default' in self.option_dict[key]:
- _type = type(self.option_dict[key]['default'])
- if _type is not None:
- tocast = True
- if not callable(_type) and isinstance(value, _type):
- tocast = False
- if tocast:
- try:
- config[key] = _type(value)
- except BaseException, e:
- raise TypeCastException(
- "Could not coerce %s, %s, "
- "to type %s: %s" % (key, value, _type.__name__, e))
- _format = self.option_dict[key].get('format', None)
- _ftype = self.types.get(_format, None)
- if _ftype:
- try:
- config[key] = _ftype.to_python(value)
- except BaseException, e:
- raise TypeCastException(
- "Could not coerce %s, %s, "
- "to format %s: %s" % (key, value,
- _ftype.__class__.__name__,
- e))
-
- return config
-
- def prep_value(self, config):
- """
- the inverse of to_python method,
- called just before serialization
- """
- for key, value in config.items():
- _format = self.option_dict[key].get('format', None)
- _ftype = self.types.get(_format, None)
- if _ftype and hasattr(_ftype, 'get_prep_value'):
- try:
- config[key] = _ftype.get_prep_value(value)
- except BaseException, e:
- raise TypeCastException(
- "Could not serialize %s, %s, "
- "by format %s: %s" % (key, value,
- _ftype.__class__.__name__,
- e))
- else:
- config[key] = value
- return config
-
- # methods for adding configuration
-
- def get_default_values(self):
- """
- return config options built from the configuration defaults
- """
- defaults = {}
- for key, value in self.items():
- if 'default' in value:
- defaults[key] = value['default']
- return copy.deepcopy(defaults)
-
- def get_adaptor(self, format):
- """
- get specified format adaptor or
- guess for a given filename
- """
- adaptor = self.adaptors.get(format, None)
- if adaptor:
- return adaptor
-
- # not registered in adaptors dict, let's try all
- for adaptor in self.adaptors.values():
- if format in adaptor.extensions:
- return adaptor
-
- def filename2format(self, filename):
- extension = os.path.splitext(filename)[-1]
- return extension.lstrip('.') or None
-
- def serialize(self, filename, format=None, full=False):
- if not format:
- format = self._format
- if not format:
- format = self.filename2format(filename)
- if not format:
- raise Exception('Please specify a format')
- # TODO: more specific exception type
-
- adaptor = self.get_adaptor(format)
- if not adaptor:
- raise Exception("Adaptor not found for format: %s" % format)
-
- config = copy.deepcopy(self.config)
- serializable = self.prep_value(config)
- adaptor.write(serializable, filename)
-
- if self.mtime:
- self.touch_mtime(filename)
-
- def touch_mtime(self, filename):
- mtime = self.mtime
- os.utime(filename, (mtime, mtime))
-
- def deserialize(self, string=None, fromfile=None, format=None):
- """
- load configuration from a file or string
- """
-
- def _try_deserialize():
- if fromfile:
- with open(fromfile, 'r') as f:
- content = adaptor.read(f)
- elif string:
- content = adaptor.read(string)
- return content
-
- # XXX cleanup this!
-
- if fromfile:
- assert os.path.exists(fromfile)
- if not format:
- format = self.filename2format(fromfile)
-
- if not format:
- format = self._format
- if format:
- adaptor = self.get_adaptor(format)
- else:
- adaptor = None
-
- if adaptor:
- content = _try_deserialize()
- return content
-
- # no adaptor, let's try rest of adaptors
-
- adaptors = self.adaptors[:]
-
- if format:
- adaptors.sort(
- key=lambda x: int(
- format in x.extensions),
- reverse=True)
-
- for adaptor in adaptors:
- content = _try_deserialize()
- return content
-
- def set_dirty(self):
- self.dirty = True
-
- def is_dirty(self):
- return self.dirty
-
- def load(self, *args, **kwargs):
- """
- load from string or file
- if no string or fromfile option is given,
- it will attempt to load from defaults
- defined in the schema.
- """
- string = args[0] if args else None
- fromfile = kwargs.get("fromfile", None)
- mtime = kwargs.pop("mtime", None)
- self.mtime = mtime
- content = None
-
- # start with defaults, so we can
- # have partial values applied.
- content = self.get_default_values()
- if string and isinstance(string, str):
- content = self.deserialize(string)
-
- if not string and fromfile is not None:
- #import ipdb;ipdb.set_trace()
- content = self.deserialize(fromfile=fromfile)
-
- if not content:
- logger.error('no content could be loaded')
- # XXX raise!
- return
-
- # lazy evaluation until first level of nesting
- # to allow lambdas with context-dependent info
- # like os.path.expanduser
- for k, v in content.iteritems():
- if callable(v):
- content[k] = v()
-
- self.validate(content)
- self.config = content
- return True
-
-
-def testmain(): # pragma: no cover
-
- from tests import test_validation as t
- import pprint
-
- config = PluggableConfig(_format="json")
- properties = copy.deepcopy(t.sample_spec)
-
- config.options = properties
- config.load(fromfile='data.json')
-
- print 'config'
- pprint.pprint(config.config)
-
- config.serialize('/tmp/testserial.json')
-
-if __name__ == "__main__":
- testmain()
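
To make the adaptors/types machinery concrete, a small hedged sketch; the spec is invented but mirrors the sample specs used by the deleted validation tests.

    from leap.base.pluggableconfig import PluggableConfig

    sample_spec = {
        'properties': {
            'prop_uri': {'type': str, 'default': 'http://example.org',
                         'format': 'uri'},
            'prop_date': {'type': str, 'default': '2012-12-12',
                          'format': 'date'},
        }
    }

    config = PluggableConfig(format="json")
    config.options = sample_spec
    config.load()    # no string/file given: fall back to the schema defaults
    # 'uri' values are now urlparse results, 'date' values time.struct_time
    config.serialize('/tmp/example.json')
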
diff --git a/src/leap/base/providers.py b/src/leap/base/providers.py
deleted file mode 100644
index d41f3695..00000000
--- a/src/leap/base/providers.py
+++ /dev/null
@@ -1,29 +0,0 @@
-"""all dealing with leap-providers: definition files, updating"""
-from leap.base import config as baseconfig
-from leap.base import specs
-
-
-class LeapProviderDefinition(baseconfig.JSONLeapConfig):
- spec = specs.leap_provider_spec
-
- def _get_slug(self):
- domain = getattr(self, 'domain', None)
- if domain:
- path = baseconfig.get_provider_path(domain)
- else:
- path = baseconfig.get_default_provider_path()
-
- return baseconfig.get_config_file(
- 'provider.json', folder=path)
-
- def _set_slug(self, *args, **kwargs):
- raise AttributeError("you cannot set slug")
-
- slug = property(_get_slug, _set_slug)
-
-
-class LeapProviderSet(object):
- # we gather them from the filesystem
- # TODO: (MVS+)
- def __init__(self):
- self.count = 0
diff --git a/src/leap/base/specs.py b/src/leap/base/specs.py
deleted file mode 100644
index f57d7e9c..00000000
--- a/src/leap/base/specs.py
+++ /dev/null
@@ -1,67 +0,0 @@
-leap_provider_spec = {
- 'description': 'provider definition',
- 'type': 'object',
- 'properties': {
- #'serial': {
- #'type': int,
- #'default': 1,
- #'required': True,
- #},
- 'version': {
- 'type': unicode,
- 'default': '0.1.0'
- #'required': True
- },
- "default_language": {
- 'type': unicode,
- 'default': 'en'
- },
- 'domain': {
- 'type': unicode, # XXX define uri type
- 'default': 'testprovider.example.org'
- #'required': True,
- },
- 'name': {
- #'type': LEAPTranslatable,
- 'type': dict,
- 'format': 'translatable',
- 'default': {u'en': u'Test Provider'}
- #'required': True
- },
- 'description': {
- #'type': LEAPTranslatable,
- 'type': dict,
- 'format': 'translatable',
- 'default': {u'en': u'Test provider'}
- },
- 'enrollment_policy': {
- 'type': unicode, # oneof ??
- 'default': 'open'
- },
- 'services': {
- 'type': list, # oneof ??
- 'default': ['eip']
- },
- 'api_version': {
- 'type': unicode,
- 'default': '0.1.0' # version regexp
- },
- 'api_uri': {
- 'type': unicode # uri
- },
- 'public_key': {
- 'type': unicode # fingerprint
- },
- 'ca_cert_fingerprint': {
- 'type': unicode,
- },
- 'ca_cert_uri': {
- 'type': unicode,
- 'format': 'https-uri'
- },
- 'languages': {
- 'type': list,
- 'default': ['en']
- }
- }
-}
diff --git a/src/leap/base/tests/__init__.py b/src/leap/base/tests/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/src/leap/base/tests/__init__.py
+++ /dev/null
diff --git a/src/leap/base/tests/test_auth.py b/src/leap/base/tests/test_auth.py
deleted file mode 100644
index b3009a9b..00000000
--- a/src/leap/base/tests/test_auth.py
+++ /dev/null
@@ -1,58 +0,0 @@
-from BaseHTTPServer import BaseHTTPRequestHandler
-import urlparse
-try:
- import unittest2 as unittest
-except ImportError:
- import unittest
-
-import requests
-#from mock import Mock
-
-from leap.base import auth
-#from leap.base import exceptions
-from leap.eip.tests.test_checks import NoLogRequestHandler
-from leap.testing.basetest import BaseLeapTest
-from leap.testing.https_server import BaseHTTPSServerTestCase
-
-
-class LeapSRPRegisterTests(BaseHTTPSServerTestCase, BaseLeapTest):
- __name__ = "leap_srp_register_test"
- provider = "testprovider.example.org"
-
- class request_handler(NoLogRequestHandler, BaseHTTPRequestHandler):
- responses = {
- '/': ['OK', '']}
-
- def do_GET(self):
- path = urlparse.urlparse(self.path)
- message = '\n'.join(self.responses.get(
- path.path, None))
- self.send_response(200)
- self.end_headers()
- self.wfile.write(message)
-
- def setUp(self):
- pass
-
- def tearDown(self):
- pass
-
- def test_srp_auth_should_implement_check_methods(self):
- SERVER = "https://localhost:8443"
- srp_auth = auth.LeapSRPRegister(provider=SERVER, verify=False)
-
- self.assertTrue(hasattr(srp_auth, "init_session"),
- "missing meth")
- self.assertTrue(hasattr(srp_auth, "get_registration_uri"),
- "missing meth")
- self.assertTrue(hasattr(srp_auth, "register_user"),
- "missing meth")
-
- def test_srp_auth_basic_functionality(self):
- SERVER = "https://localhost:8443"
- srp_auth = auth.LeapSRPRegister(provider=SERVER, verify=False)
-
- self.assertIsInstance(srp_auth.session, requests.sessions.Session)
- self.assertEqual(
- srp_auth.get_registration_uri(),
- "https://localhost:8443/1/users")
diff --git a/src/leap/base/tests/test_checks.py b/src/leap/base/tests/test_checks.py
deleted file mode 100644
index 8126755b..00000000
--- a/src/leap/base/tests/test_checks.py
+++ /dev/null
@@ -1,177 +0,0 @@
-try:
- import unittest2 as unittest
-except ImportError:
- import unittest
-import os
-import sh
-
-from mock import (patch, Mock)
-from StringIO import StringIO
-
-from leap.base import checks
-from leap.base import exceptions
-from leap.testing.basetest import BaseLeapTest
-
-_uid = os.getuid()
-
-
-class LeapNetworkCheckTest(BaseLeapTest):
- __name__ = "leap_network_check_tests"
-
- def setUp(self):
- os.environ['PATH'] += ':/bin'
- pass
-
- def tearDown(self):
- pass
-
- def test_checker_should_implement_check_methods(self):
- checker = checks.LeapNetworkChecker()
-
- self.assertTrue(hasattr(checker, "check_internet_connection"),
- "missing meth")
- self.assertTrue(hasattr(checker, "check_tunnel_default_interface"),
- "missing meth")
- self.assertTrue(hasattr(checker, "is_internet_up"),
- "missing meth")
- self.assertTrue(hasattr(checker, "ping_gateway"),
- "missing meth")
- self.assertTrue(hasattr(checker, "parse_log_and_react"),
- "missing meth")
-
- def test_checker_should_actually_call_all_tests(self):
- checker = checks.LeapNetworkChecker()
- mc = Mock()
- checker.run_all(checker=mc)
- self.assertTrue(mc.check_internet_connection.called, "not called")
- self.assertTrue(mc.check_tunnel_default_interface.called, "not called")
- self.assertTrue(mc.is_internet_up.called, "not called")
- self.assertTrue(mc.parse_log_and_react.called, "not called")
-
- # ping gateway only called if we pass provider_gw
- checker = checks.LeapNetworkChecker(provider_gw="0.0.0.0")
- mc = Mock()
- checker.run_all(checker=mc)
- self.assertTrue(mc.check_internet_connection.called, "not called")
- self.assertTrue(mc.check_tunnel_default_interface.called, "not called")
- self.assertTrue(mc.ping_gateway.called, "not called")
- self.assertTrue(mc.is_internet_up.called, "not called")
- self.assertTrue(mc.parse_log_and_react.called, "not called")
-
- def test_get_default_interface_no_interface(self):
- checker = checks.LeapNetworkChecker()
- with patch('leap.base.checks.open', create=True) as mock_open:
- with self.assertRaises(exceptions.NoDefaultInterfaceFoundError):
- mock_open.return_value = StringIO(
- "Iface\tDestination Gateway\t"
- "Flags\tRefCntd\tUse\tMetric\t"
- "Mask\tMTU\tWindow\tIRTT")
- checker.get_default_interface_gateway()
-
- def test_check_tunnel_default_interface(self):
- checker = checks.LeapNetworkChecker()
- with patch('leap.base.checks.open', create=True) as mock_open:
- with self.assertRaises(exceptions.TunnelNotDefaultRouteError):
- mock_open.return_value = StringIO(
- "Iface\tDestination Gateway\t"
- "Flags\tRefCntd\tUse\tMetric\t"
- "Mask\tMTU\tWindow\tIRTT\n"
- "wlan0\t00000000\t0102A8C0\t"
- "0003\t0\t0\t0\t00000000\t0\t0\t0")
- checker.check_tunnel_default_interface()
-
- with patch('leap.base.checks.open', create=True) as mock_open:
- mock_open.return_value = StringIO(
- "Iface\tDestination Gateway\t"
- "Flags\tRefCntd\tUse\tMetric\t"
- "Mask\tMTU\tWindow\tIRTT\n"
- "tun0\t00000000\t01002A0A\t0003\t0\t0\t0\t00000080\t0\t0\t0")
- checker.check_tunnel_default_interface()
-
- def test_ping_gateway_fail(self):
- checker = checks.LeapNetworkChecker()
- with patch.object(sh, "ping") as mocked_ping:
- with self.assertRaises(exceptions.NoConnectionToGateway):
- mocked_ping.return_value = Mock
- mocked_ping.return_value.stdout = "11% packet loss"
- checker.ping_gateway("4.2.2.2")
-
- def test_ping_gateway(self):
- checker = checks.LeapNetworkChecker()
- with patch.object(sh, "ping") as mocked_ping:
- mocked_ping.return_value = Mock
- mocked_ping.return_value.stdout = """
-PING 4.2.2.2 (4.2.2.2) 56(84) bytes of data.
-64 bytes from 4.2.2.2: icmp_req=1 ttl=54 time=33.8 ms
-64 bytes from 4.2.2.2: icmp_req=2 ttl=54 time=30.6 ms
-64 bytes from 4.2.2.2: icmp_req=3 ttl=54 time=31.4 ms
-64 bytes from 4.2.2.2: icmp_req=4 ttl=54 time=36.1 ms
-64 bytes from 4.2.2.2: icmp_req=5 ttl=54 time=30.8 ms
-64 bytes from 4.2.2.2: icmp_req=6 ttl=54 time=30.4 ms
-64 bytes from 4.2.2.2: icmp_req=7 ttl=54 time=30.7 ms
-64 bytes from 4.2.2.2: icmp_req=8 ttl=54 time=32.7 ms
-64 bytes from 4.2.2.2: icmp_req=9 ttl=54 time=31.4 ms
-64 bytes from 4.2.2.2: icmp_req=10 ttl=54 time=33.3 ms
-
---- 4.2.2.2 ping statistics ---
-10 packets transmitted, 10 received, 0% packet loss, time 9016ms
-rtt min/avg/max/mdev = 30.497/32.172/36.161/1.755 ms"""
- checker.ping_gateway("4.2.2.2")
-
- def test_check_internet_connection_failures(self):
- checker = checks.LeapNetworkChecker()
- TimeoutError = get_ping_timeout_error()
- with patch.object(sh, "ping") as mocked_ping:
- mocked_ping.side_effect = TimeoutError
- with self.assertRaises(exceptions.NoInternetConnection):
- with patch.object(checker, "ping_gateway") as mock_gateway:
- mock_gateway.side_effect = exceptions.NoConnectionToGateway
- checker.check_internet_connection()
-
- with patch.object(sh, "ping") as mocked_ping:
- mocked_ping.side_effect = TimeoutError
- with self.assertRaises(exceptions.NoInternetConnection):
- with patch.object(checker, "ping_gateway") as mock_gateway:
- mock_gateway.return_value = True
- checker.check_internet_connection()
-
- def test_parse_log_and_react(self):
- checker = checks.LeapNetworkChecker()
- to_call = Mock()
- log = [("leap.openvpn - INFO - Mon Nov 19 13:36:24 2012 "
- "read UDPv4 [ECONNREFUSED]: Connection refused (code=111)")]
- err_matrix = [(checks.EVENT_CONNECT_REFUSED, (to_call, ))]
- checker.parse_log_and_react(log, err_matrix)
- self.assertTrue(to_call.called)
-
- log = [("2012-11-19 13:36:26,177 - leap.openvpn - INFO - "
- "Mon Nov 19 13:36:24 2012 ERROR: Linux route delete command "
- "failed: external program exited"),
- ("2012-11-19 13:36:26,178 - leap.openvpn - INFO - "
- "Mon Nov 19 13:36:24 2012 ERROR: Linux route delete command "
- "failed: external program exited"),
- ("2012-11-19 13:36:26,180 - leap.openvpn - INFO - "
- "Mon Nov 19 13:36:24 2012 ERROR: Linux route delete command "
- "failed: external program exited"),
- ("2012-11-19 13:36:26,181 - leap.openvpn - INFO - "
- "Mon Nov 19 13:36:24 2012 /sbin/ifconfig tun0 0.0.0.0"),
- ("2012-11-19 13:36:26,182 - leap.openvpn - INFO - "
- "Mon Nov 19 13:36:24 2012 Linux ip addr del failed: external "
- "program exited with error stat"),
- ("2012-11-19 13:36:26,183 - leap.openvpn - INFO - "
- "Mon Nov 19 13:36:26 2012 SIGTERM[hard,] received, process"
- "exiting"), ]
- to_call.reset_mock()
- checker.parse_log_and_react(log, err_matrix)
- self.assertFalse(to_call.called)
-
- to_call.reset_mock()
- checker.parse_log_and_react([], err_matrix)
- self.assertFalse(to_call.called)
-
-
-def get_ping_timeout_error():
- try:
- sh.ping("-c", "1", "-w", "1", "8.8.7.7")
- except Exception as e:
- return e
diff --git a/src/leap/base/tests/test_config.py b/src/leap/base/tests/test_config.py
deleted file mode 100644
index d03149b2..00000000
--- a/src/leap/base/tests/test_config.py
+++ /dev/null
@@ -1,247 +0,0 @@
-import json
-import os
-import platform
-import socket
-#import tempfile
-
-import mock
-import requests
-
-from leap.base import config
-from leap.base import constants
-from leap.base import exceptions
-from leap.eip import constants as eipconstants
-from leap.util.fileutil import mkdir_p
-from leap.testing.basetest import BaseLeapTest
-
-
-try:
- import unittest2 as unittest
-except ImportError:
- import unittest
-
-_system = platform.system()
-
-
-class JSONLeapConfigTest(BaseLeapTest):
- def setUp(self):
- pass
-
- def tearDown(self):
- pass
-
- def test_metaclass(self):
- with self.assertRaises(exceptions.ImproperlyConfigured) as exc:
- class DummyTestConfig(config.JSONLeapConfig):
- __metaclass__ = config.MetaConfigWithSpec
- exc.startswith("missing spec dict")
-
- class DummyTestConfig(config.JSONLeapConfig):
- __metaclass__ = config.MetaConfigWithSpec
- spec = {'properties': {}}
- with self.assertRaises(exceptions.ImproperlyConfigured) as exc:
- DummyTestConfig()
- exc.startswith("missing slug")
-
- class DummyTestConfig(config.JSONLeapConfig):
- __metaclass__ = config.MetaConfigWithSpec
- spec = {'properties': {}}
- slug = "foo"
- DummyTestConfig()
-
-#######################################
-#
-# provider fetch tests block
-#
-
-
-class ProviderTest(BaseLeapTest):
- # override per test fixtures
-
- def setUp(self):
- pass
-
- def tearDown(self):
- pass
-
-
-# XXX deprecated. similar test in eip.checks
-
-#class BareHomeTestCase(ProviderTest):
-#
- #__name__ = "provider_config_tests_bare_home"
-#
- #def test_should_raise_if_missing_eip_json(self):
- #with self.assertRaises(exceptions.MissingConfigFileError):
- #config.get_config_json(os.path.join(self.home, 'eip.json'))
-
-
-class ProviderDefinitionTestCase(ProviderTest):
- # XXX MOVE TO eip.test_checks
- # -- kali 2012-08-24 00:38
-
- __name__ = "provider_config_tests"
-
- def setUp(self):
- # dump a sample eip file
- # XXX Move to Use EIP Spec Instead!!!
- # XXX tests to be moved to eip.checks and eip.providers
- # XXX can use eipconfig.dump_default_eipconfig
-
- path = os.path.join(self.home, '.config', 'leap')
- mkdir_p(path)
- with open(os.path.join(path, 'eip.json'), 'w') as fp:
- json.dump(eipconstants.EIP_SAMPLE_JSON, fp)
-
-
-# these tests below should move to
-# eip.checks
-# config.Configuration has been deprecated
-
-# TODO:
-# - We're instantiating a ProviderTest because we're doing the home wipeoff
-# on setUpClass instead of the setUp (for speedup of the general cases).
-
-# We really should be testing all of them in the same testCase, and
-# doing an extra wipe of the tempdir... but be careful!!!! do not mess with
-# os.environ home more than needed... that could potentially bite!
-
-# XXX actually, another thing to fix here is separating tests:
-# - test that requests has been called.
-# - check deeper for error types/msgs
-
-# we SHOULD inject requests dep in the constructor
-# (so we can pass mock easily).
-
-
-#class ProviderFetchConError(ProviderTest):
- #def test_connection_error(self):
- #with mock.patch.object(requests, "get") as mock_method:
- #mock_method.side_effect = requests.ConnectionError
- #cf = config.Configuration()
- #self.assertIsInstance(cf.error, str)
-#
-#
-#class ProviderFetchHttpError(ProviderTest):
- #def test_file_not_found(self):
- #with mock.patch.object(requests, "get") as mock_method:
- #mock_method.side_effect = requests.HTTPError
- #cf = config.Configuration()
- #self.assertIsInstance(cf.error, str)
-#
-#
-#class ProviderFetchInvalidUrl(ProviderTest):
- #def test_invalid_url(self):
- #cf = config.Configuration("ht")
- #self.assertTrue(cf.error)
-
-
-# end provider fetch tests
-###########################################
-
-
-class ConfigHelperFunctions(BaseLeapTest):
-
- __name__ = "config_helper_tests"
-
- def setUp(self):
- pass
-
- def tearDown(self):
- pass
-
- # tests
-
- @unittest.skipUnless(_system == "Linux", "linux only")
- def test_lin_get_config_file(self):
- """
- config file path where expected? (linux)
- """
- self.assertEqual(
- config.get_config_file(
- 'test', folder="foo/bar"),
- os.path.expanduser(
- '~/.config/leap/foo/bar/test')
- )
-
- @unittest.skipUnless(_system == "Darwin", "mac only")
- def test_mac_get_config_file(self):
- """
- config file path where expected? (mac)
- """
- self._missing_test_for_plat(do_raise=True)
-
- @unittest.skipUnless(_system == "Windows", "win only")
- def test_win_get_config_file(self):
- """
- config file path where expected?
- """
- self._missing_test_for_plat(do_raise=True)
-
- #
- # XXX hey, I'm raising exceptions here
- # on purpose. just wanted to make sure
- # that the skip stuff is doing it right.
- # If you're working on win/macos tests,
- # feel free to remove tests that you see
- # are too redundant.
-
- @unittest.skipUnless(_system == "Linux", "linux only")
- def test_lin_get_config_dir(self):
- """
- nice config dir? (linux)
- """
- self.assertEqual(
- config.get_config_dir(),
- os.path.expanduser('~/.config/leap'))
-
- @unittest.skipUnless(_system == "Darwin", "mac only")
- def test_mac_get_config_dir(self):
- """
- nice config dir? (mac)
- """
- self._missing_test_for_plat(do_raise=True)
-
- @unittest.skipUnless(_system == "Windows", "win only")
- def test_win_get_config_dir(self):
- """
- nice config dir? (win)
- """
- self._missing_test_for_plat(do_raise=True)
-
- # provider paths
-
- @unittest.skipUnless(_system == "Linux", "linux only")
- def test_get_default_provider_path(self):
- """
- is default provider path ok?
- """
- self.assertEqual(
- config.get_default_provider_path(),
- os.path.expanduser(
- '~/.config/leap/providers/%s/' %
- constants.DEFAULT_PROVIDER)
- )
-
- # validate ip
-
- def test_validate_ip(self):
- """
- check our ip validation
- """
- config.validate_ip('3.3.3.3')
- with self.assertRaises(socket.error):
- config.validate_ip('255.255.255.256')
- with self.assertRaises(socket.error):
- config.validate_ip('foobar')
-
- @unittest.skip
- def test_validate_domain(self):
- """
- code to be written yet
- """
- raise NotImplementedError
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/src/leap/base/tests/test_providers.py b/src/leap/base/tests/test_providers.py
deleted file mode 100644
index f257f54d..00000000
--- a/src/leap/base/tests/test_providers.py
+++ /dev/null
@@ -1,150 +0,0 @@
-import copy
-import json
-try:
- import unittest2 as unittest
-except ImportError:
- import unittest
-import os
-
-import jsonschema
-
-#from leap import __branding as BRANDING
-from leap.testing.basetest import BaseLeapTest
-from leap.base import providers
-
-
-EXPECTED_DEFAULT_CONFIG = {
- u"api_version": u"0.1.0",
- #u"description": "LEAPTranslatable<{u'en': u'Test provider'}>",
- u"description": {u'en': u'Test provider'},
- u"default_language": u"en",
- #u"display_name": {u'en': u"Test Provider"},
- u"domain": u"testprovider.example.org",
- #u'name': "LEAPTranslatable<{u'en': u'Test Provider'}>",
- u'name': {u'en': u'Test Provider'},
- u"enrollment_policy": u"open",
- #u"serial": 1,
- u"services": [
- u"eip"
- ],
- u"languages": [u"en"],
- u"version": u"0.1.0"
-}
-
-
-class TestLeapProviderDefinition(BaseLeapTest):
- def setUp(self):
- self.domain = "testprovider.example.org"
- self.definition = providers.LeapProviderDefinition(
- domain=self.domain)
- self.definition.save(force=True)
- self.definition.load() # why have to load after save??
- self.config = self.definition.config
-
- def tearDown(self):
- if hasattr(self, 'testfile') and os.path.isfile(self.testfile):
- os.remove(self.testfile)
-
- # tests
-
- # XXX most of these tests can be made more abstract
- # and moved to test_baseconfig *triangulate!*
-
- def test_provider_slug_property(self):
- slug = self.definition.slug
- self.assertEquals(
- slug,
- os.path.join(
- self.home,
- '.config', 'leap', 'providers',
- '%s' % self.domain,
- 'provider.json'))
- with self.assertRaises(AttributeError):
- self.definition.slug = 23
-
- def test_provider_dump(self):
- # check a good provider definition is dumped to disk
- self.testfile = self.get_tempfile('test.json')
- self.definition.save(to=self.testfile, force=True)
- deserialized = json.load(open(self.testfile, 'rb'))
- self.maxDiff = None
- #import ipdb;ipdb.set_trace()
- self.assertEqual(deserialized, EXPECTED_DEFAULT_CONFIG)
-
- def test_provider_dump_to_slug(self):
- # same as above, but we test the ability to save to a
- # file generated from the slug.
- # XXX THIS TEST SHOULD MOVE TO test_baseconfig
- self.definition.save()
- filename = self.definition.filename
- self.assertTrue(os.path.isfile(filename))
- deserialized = json.load(open(filename, 'rb'))
- self.assertEqual(deserialized, EXPECTED_DEFAULT_CONFIG)
-
- def test_provider_load(self):
- # check loading provider from disk file
- self.testfile = self.get_tempfile('test_load.json')
- with open(self.testfile, 'w') as wf:
- wf.write(json.dumps(EXPECTED_DEFAULT_CONFIG))
- self.definition.load(fromfile=self.testfile)
- #self.assertDictEqual(self.config,
- #EXPECTED_DEFAULT_CONFIG)
- self.assertItemsEqual(self.config, EXPECTED_DEFAULT_CONFIG)
-
- def test_provider_validation(self):
- self.definition.validate(self.config)
- _config = copy.deepcopy(self.config)
- # bad type, raise validation error
- _config['domain'] = 111
- with self.assertRaises(jsonschema.ValidationError):
- self.definition.validate(_config)
-
- @unittest.skip
- def test_load_malformed_json_definition(self):
- raise NotImplementedError
-
- @unittest.skip
- def test_type_validation(self):
- # check various type validation
- # type cast
- raise NotImplementedError
-
-
-class TestLeapProviderSet(BaseLeapTest):
-
- def setUp(self):
- self.providers = providers.LeapProviderSet()
-
- def tearDown(self):
- pass
- ###
-
- def test_get_zero_count(self):
- self.assertEqual(self.providers.count, 0)
-
- @unittest.skip
- def test_count_defined_providers(self):
- # check the method used for making
- # the list of providers
- raise NotImplementedError
-
- @unittest.skip
- def test_get_default_provider(self):
- raise NotImplementedError
-
- @unittest.skip
- def test_should_be_at_least_one_provider_after_init(self):
- # when we init an empty environment,
- # there should be at least one provider,
- # that will be a dump of the default provider definition
- # somehow a high level test
- raise NotImplementedError
-
- @unittest.skip
- def test_get_eip_remote_from_default_provider(self):
- # from: default provider
- # expect: remote eip domain
- raise NotImplementedError
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/src/leap/base/tests/test_validation.py b/src/leap/base/tests/test_validation.py
deleted file mode 100644
index 87e99648..00000000
--- a/src/leap/base/tests/test_validation.py
+++ /dev/null
@@ -1,92 +0,0 @@
-import copy
-import datetime
-#import json
-try:
- import unittest2 as unittest
-except ImportError:
- import unittest
-import os
-
-import jsonschema
-
-from leap.base.config import JSONLeapConfig
-from leap.base import pluggableconfig
-from leap.testing.basetest import BaseLeapTest
-
-SAMPLE_CONFIG_DICT = {
- 'prop_one': 1,
- 'prop_uri': "http://example.org",
- 'prop_date': '2012-12-12',
-}
-
-EXPECTED_CONFIG = {
- 'prop_one': 1,
- 'prop_uri': "http://example.org",
- 'prop_date': datetime.datetime(2012, 12, 12)
-}
-
-sample_spec = {
- 'description': 'sample schema definition',
- 'type': 'object',
- 'properties': {
- 'prop_one': {
- 'type': int,
- 'default': 1,
- 'required': True
- },
- 'prop_uri': {
- 'type': str,
- 'default': 'http://example.org',
- 'required': True,
- 'format': 'uri'
- },
- 'prop_date': {
- 'type': str,
- 'default': '2012-12-12',
- 'format': 'date'
- }
- }
-}
-
-
-class SampleConfig(JSONLeapConfig):
- spec = sample_spec
-
- @property
- def slug(self):
- return os.path.expanduser('~/sampleconfig.json')
-
-
-class TestJSONLeapConfigValidation(BaseLeapTest):
- def setUp(self):
- self.sampleconfig = SampleConfig()
- self.sampleconfig.save()
- self.sampleconfig.load()
- self.config = self.sampleconfig.config
-
- def tearDown(self):
- if hasattr(self, 'testfile') and os.path.isfile(self.testfile):
- os.remove(self.testfile)
-
- # tests
-
- def test_good_validation(self):
- self.sampleconfig.validate(SAMPLE_CONFIG_DICT)
-
- def test_broken_int(self):
- _config = copy.deepcopy(SAMPLE_CONFIG_DICT)
- _config['prop_one'] = '1'
- with self.assertRaises(jsonschema.ValidationError):
- self.sampleconfig.validate(_config)
-
- def test_format_property(self):
- # JsonSchema Validator does not check the format property.
- # We would have to extend the Configuration class
- blah = copy.deepcopy(SAMPLE_CONFIG_DICT)
- blah['prop_uri'] = 'xxx'
- with self.assertRaises(pluggableconfig.TypeCastException):
- self.sampleconfig.validate(blah)
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/src/leap/baseapp/__init__.py b/src/leap/baseapp/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/src/leap/baseapp/__init__.py
+++ /dev/null
diff --git a/src/leap/baseapp/constants.py b/src/leap/baseapp/constants.py
deleted file mode 100644
index e312be21..00000000
--- a/src/leap/baseapp/constants.py
+++ /dev/null
@@ -1,6 +0,0 @@
-# This timer is used for polling the vpn manager state.
-
-# XXX what is an optimum polling interval?
-# too little will be overkill, too much will
-# miss transition states.
-TIMER_MILLISECONDS = 250.0
diff --git a/src/leap/baseapp/dialogs.py b/src/leap/baseapp/dialogs.py
deleted file mode 100644
index d256fc99..00000000
--- a/src/leap/baseapp/dialogs.py
+++ /dev/null
@@ -1,61 +0,0 @@
-# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
-import logging
-
-from PyQt4.QtGui import (QDialog, QFrame, QPushButton, QLabel, QMessageBox)
-
-logger = logging.getLogger(name=__name__)
-
-
-class ErrorDialog(QDialog):
- def __init__(self, parent=None, errtype=None, msg=None, label=None):
- super(ErrorDialog, self).__init__(parent)
- frameStyle = QFrame.Sunken | QFrame.Panel
- self.warningLabel = QLabel()
- self.warningLabel.setFrameStyle(frameStyle)
- self.warningButton = QPushButton("QMessageBox.&warning()")
-
- if msg is not None:
- self.msg = msg
- if label is not None:
- self.label = label
- if errtype == "critical":
- self.criticalMessage(self.msg, self.label)
-
- def warningMessage(self, msg, label):
- msgBox = QMessageBox(QMessageBox.Warning,
- "LEAP Client Error",
- msg,
- QMessageBox.NoButton, self)
- msgBox.addButton("&Ok", QMessageBox.AcceptRole)
- if msgBox.exec_() == QMessageBox.AcceptRole:
- pass
- # do whatever we want to do after
- # closing the dialog. we can pass that
- # in the constructor
-
- def criticalMessage(self, msg, label):
- msgBox = QMessageBox(QMessageBox.Critical,
- "LEAP Client Error",
- msg,
- QMessageBox.NoButton, self)
- msgBox.addButton("&Ok", QMessageBox.AcceptRole)
- msgBox.exec_()
-
- # It's critical, so we exit.
- # We should better emit a signal and connect it
- # with the proper shutdownAndQuit method, but
- # this suffices for now.
- logger.info('Quitting')
- import sys
- sys.exit()
-
- def confirmMessage(self, msg, label, action):
- msgBox = QMessageBox(QMessageBox.Critical,
- self.tr("LEAP Client Error"),
- msg,
- QMessageBox.NoButton, self)
- msgBox.addButton("&Ok", QMessageBox.AcceptRole)
- msgBox.addButton("&Cancel", QMessageBox.RejectRole)
-
- if msgBox.exec_() == QMessageBox.AcceptRole:
- action()
diff --git a/src/leap/baseapp/eip.py b/src/leap/baseapp/eip.py
deleted file mode 100644
index b34cc82e..00000000
--- a/src/leap/baseapp/eip.py
+++ /dev/null
@@ -1,243 +0,0 @@
-from __future__ import print_function
-import logging
-import time
-#import sys
-
-from PyQt4 import QtCore
-
-from leap.baseapp.dialogs import ErrorDialog
-from leap.baseapp import constants
-from leap.eip import exceptions as eip_exceptions
-from leap.eip.eipconnection import EIPConnection
-from leap.base.checks import EVENT_CONNECT_REFUSED
-from leap.util import geo
-
-logger = logging.getLogger(name=__name__)
-
-
-class EIPConductorAppMixin(object):
- """
- initializes an instance of EIPConnection,
- gathers errors, and passes status-change signals
- from Qt land along to the conductor.
- Connects the eip connect/disconnect logic
- to the switches in the app (buttons/menu items).
- """
- ERR_DIALOG = False
-
- def __init__(self, *args, **kwargs):
- opts = kwargs.pop('opts')
- config_file = getattr(opts, 'config_file', None)
- provider = kwargs.pop('provider')
-
- self.eip_service_started = False
-
- # conductor (eip connection) is in charge of all
- # vpn-related configuration / monitoring.
- # we pass a tuple of signals that will be
- # triggered when status changes.
-
- self.conductor = EIPConnection(
- watcher_cb=self.newLogLine.emit,
- config_file=config_file,
- checker_signals=(self.eipStatusChange.emit, ),
- status_signals=(self.openvpnStatusChange.emit, ),
- debug=self.debugmode,
- ovpn_verbosity=opts.openvpn_verb,
- provider=provider)
-
- # Do we want to enable the skip checks w/o being
- # in debug mode??
- #self.skip_download = opts.no_provider_checks
- #self.skip_verify = opts.no_ca_verify
- self.skip_download = False
- self.skip_verify = False
-
- def run_eip_checks(self):
- """
- runs eip checks and
- the error checking loop
- """
- logger.debug('running EIP CHECKS')
- self.conductor.run_checks(
- skip_download=self.skip_download,
- skip_verify=self.skip_verify)
- self.error_check()
-
- self.start_eipconnection.emit()
-
- def error_check(self):
- """
- consumes the conductor error queue.
- pops errors, and acts accordingly (launching user dialogs).
- """
- logger.debug('error check')
-
- errq = self.conductor.error_queue
- while errq.qsize() != 0:
- logger.debug('%s errors left in conductor queue', errq.qsize())
- # we get exception and original traceback from queue
- error, tb = errq.get()
-
- # redundant log, debugging the loop.
- logger.error('%s: %s', error.__class__.__name__, error.message)
-
- if issubclass(error.__class__, eip_exceptions.EIPClientError):
- self.triggerEIPError.emit(error)
-
- else:
- # deprecated form of raising exception.
- raise error, None, tb
-
- if error.failfirst is True:
- break
-
- @QtCore.pyqtSlot(object)
- def onEIPError(self, error):
- """
- checks severity and launches
- dialogs informing the user about the errors.
- in the future we plan to forward errors to
- our log viewer.
- """
- if self.ERR_DIALOG:
- logger.warning('another error dialog suppressed')
- return
-
- # XXX this is actually a one-shot.
- # On the dialog there should be
- a reset signal bound to the ok button
- # or something like that.
- self.ERR_DIALOG = True
-
- if getattr(error, 'usermessage', None):
- message = error.usermessage
- else:
- message = error.message
-
- # XXX
- # check headless = False before
- # launching dialog.
- # (so Qt tests can assert stuff)
-
- if error.critical:
- logger.critical(error.message)
- #critical error (non recoverable),
- #we give user some info and quit.
- #(critical error dialog will exit app)
- ErrorDialog(errtype="critical",
- msg=message,
- label="critical error")
-
- elif error.warning:
- logger.warning(error.message)
-
- else:
- dialog = ErrorDialog()
- dialog.warningMessage(message, 'error')
-
- @QtCore.pyqtSlot()
- def statusUpdate(self):
- """
- polls status and updates ui with real time
- info about transferred bytes / connection state.
- right now it is triggered by a timer tick
- (timer controlled by StatusAwareTrayIcon class)
- """
- # TODO I guess it's too expensive to poll
- # continuously. move to signal events instead.
- # (i.e., subscribe to connection status changes
- # from openvpn manager)
-
- if not self.eip_service_started:
- # there is a race condition
- # going on here. Depending on how long we take
- # to init the qt app, the management socket
- # is not ready yet.
- return
-
- #if self.conductor.with_errors:
- #XXX how to wait on pkexec???
- #something better than this workaround, plz!!
- #I removed the pkexec pass authentication at all.
- #time.sleep(5)
- #logger.debug('timeout')
- #logger.error('errors. disconnect')
- #self.start_or_stopVPN() # is stop
-
- state = self.conductor.poll_connection_state()
- if not state:
- return
-
- ts, con_status, ok, ip, remote = state
- self.set_statusbarMessage(con_status)
- self.setIconToolTip()
-
- ts = time.strftime("%a %b %d %X", ts)
- if self.debugmode:
- self.updateTS.setText(ts)
- self.status_label.setText(con_status)
- self.ip_label.setText(ip)
- self.remote_label.setText(remote)
- self.remote_country.setText(
- geo.get_country_name(remote))
-
- # status i/o
-
- status = self.conductor.get_status_io()
- if status and self.debugmode:
- #XXX move this to systray menu indicators
- ts, (tun_read, tun_write, tcp_read, tcp_write, auth_read) = status
- ts = time.strftime("%a %b %d %X", ts)
- self.updateTS.setText(ts)
- self.tun_read_bytes.setText(tun_read)
- self.tun_write_bytes.setText(tun_write)
-
- # connection information via management interface
- log = self.conductor.get_log()
- error_matrix = [(EVENT_CONNECT_REFUSED, (self.start_or_stopVPN, ))]
- if hasattr(self.network_checker, 'checker'):
- self.network_checker.checker.parse_log_and_react(log, error_matrix)
-
- @QtCore.pyqtSlot()
- def start_or_stopVPN(self, **kwargs):
- """
- stub for running child process with vpn
- """
- if self.conductor.has_errors():
- logger.debug('not starting vpn; conductor has errors')
- return
-
- if self.eip_service_started is False:
- try:
- self.conductor.connect()
-
- except eip_exceptions.EIPNoCommandError as exc:
- logger.error('tried to run openvpn but no command is set')
- self.triggerEIPError.emit(exc)
-
- except Exception as err:
- # raise generic exception (Bad Thing Happened?)
- logger.exception(err)
- else:
- # no errors, so go on.
- if self.debugmode:
- self.startStopButton.setText(self.tr('&Disconnect'))
- self.eip_service_started = True
- self.toggleEIPAct()
-
- # XXX decouple! (timer is init by icons class).
- # we could bring Timer Init to this Mixin
- # or to its own Mixin.
- self.timer.start(constants.TIMER_MILLISECONDS)
- return
-
- if self.eip_service_started is True:
- self.network_checker.stop()
- self.conductor.disconnect()
- if self.debugmode:
- self.startStopButton.setText(self.tr('&Connect'))
- self.eip_service_started = False
- self.toggleEIPAct()
- self.timer.stop()
- return
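# Editor's note (not part of the original diff): a minimal, hypothetical sketch of
# the error-queue pattern that EIPConductorAppMixin.error_check() above relies on:
# pop (error, traceback) pairs from the conductor queue, dispatch each one, and stop
# early on a fail-fast error. The names DummyError and drain_errors are illustrative
# only; they do not exist in the LEAP codebase.

import Queue


class DummyError(Exception):
    failfirst = False


def drain_errors(errq, dispatch):
    """Consume an error queue, dispatching until empty or a fail-fast error."""
    while not errq.empty():
        error, tb = errq.get()
        dispatch(error)
        if getattr(error, 'failfirst', False):
            break


if __name__ == "__main__":
    q = Queue.Queue()
    q.put((DummyError("transient failure"), None))
    drain_errors(q, lambda err: None)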
diff --git a/src/leap/baseapp/leap_app.py b/src/leap/baseapp/leap_app.py
deleted file mode 100644
index 4d3aebd6..00000000
--- a/src/leap/baseapp/leap_app.py
+++ /dev/null
@@ -1,153 +0,0 @@
-import logging
-
-import sip
-sip.setapi('QVariant', 2)
-
-from PyQt4 import QtCore
-from PyQt4 import QtGui
-
-from leap.gui import mainwindow_rc
-
-logger = logging.getLogger(name=__name__)
-
-
-APP_LOGO = ':/images/leap-color-small.png'
-
-
-class MainWindowMixin(object):
- """
- create the main window
- for leap app
- """
-
- def __init__(self, *args, **kwargs):
- # XXX set initial visibility
- # debug = no visible
-
- widget = QtGui.QWidget()
- self.setCentralWidget(widget)
-
- mainLayout = QtGui.QVBoxLayout()
- # add widgets to layout
- #self.createWindowHeader()
- #mainLayout.addWidget(self.headerBox)
-
- # created in systray
- mainLayout.addWidget(self.statusIconBox)
- if self.debugmode:
- mainLayout.addWidget(self.statusBox)
- mainLayout.addWidget(self.loggerBox)
- widget.setLayout(mainLayout)
-
- self.createMainActions()
- self.createMainMenus()
-
- self.setWindowTitle("LEAP Client")
- self.set_app_icon()
- self.set_statusbarMessage('ready')
-
- def createMainActions(self):
- #self.openAct = QtGui.QAction("&Open...", self, shortcut="Ctrl+O",
- #triggered=self.open)
-
- self.firstRunWizardAct = QtGui.QAction(
- "&First run wizard...", self,
- triggered=self.stop_connection_and_launch_first_run_wizard)
- self.aboutAct = QtGui.QAction("&About", self, triggered=self.about)
-
- #self.aboutQtAct = QtGui.QAction("About &Qt", self,
- #triggered=QtGui.qApp.aboutQt)
-
- def createMainMenus(self):
- self.connMenu = QtGui.QMenu("&Connections", self)
- #self.viewMenu.addSeparator()
- self.connMenu.addAction(self.quitAction)
-
- self.settingsMenu = QtGui.QMenu("&Settings", self)
- self.settingsMenu.addAction(self.firstRunWizardAct)
-
- self.helpMenu = QtGui.QMenu("&Help", self)
- self.helpMenu.addAction(self.aboutAct)
- #self.helpMenu.addAction(self.aboutQtAct)
-
- self.menuBar().addMenu(self.connMenu)
- self.menuBar().addMenu(self.settingsMenu)
- self.menuBar().addMenu(self.helpMenu)
-
- def stop_connection_and_launch_first_run_wizard(self):
- settings = QtCore.QSettings()
- settings.setValue('FirstRunWizardDone', False)
- logger.debug('should run first run wizard again...')
-
- status = self.conductor.get_icon_name()
- if status != "disconnected":
- self.start_or_stopVPN()
-
- self.launch_first_run_wizard()
- #from leap.gui.firstrunwizard import FirstRunWizard
- #wizard = FirstRunWizard(
- #parent=self,
- #success_cb=self.initReady.emit)
- #wizard.show()
-
- def set_app_icon(self):
- icon = QtGui.QIcon(APP_LOGO)
- self.setWindowIcon(icon)
-
- #def createWindowHeader(self):
- #"""
- #description lines for main window
- #"""
- #self.headerBox = QtGui.QGroupBox()
- #self.headerLabel = QtGui.QLabel(
- #"<font size=40>LEAP Encryption Access Project</font>")
- #self.headerLabelSub = QtGui.QLabel(
- #"<br><i>your internet encryption toolkit</i>")
-#
- #pixmap = QtGui.QPixmap(APP_LOGO)
- #leap_lbl = QtGui.QLabel()
- #leap_lbl.setPixmap(pixmap)
-#
- #headerLayout = QtGui.QHBoxLayout()
- #headerLayout.addWidget(leap_lbl)
- #headerLayout.addWidget(self.headerLabel)
- #headerLayout.addWidget(self.headerLabelSub)
- #headerLayout.addStretch()
- #self.headerBox.setLayout(headerLayout)
-
- def set_statusbarMessage(self, msg):
- self.statusBar().showMessage(msg)
-
- def closeEvent(self, event):
- """
- redefines close event (persistent window behaviour)
- """
- if self.trayIcon.isVisible() and not self.debugmode:
- QtGui.QMessageBox.information(
- self, "Systray",
- "The program will keep running "
- "in the system tray. To "
- "terminate the program, choose "
- "<b>Quit</b> in the "
- "context menu of the system tray entry.")
- self.hide()
- event.ignore()
- return
- self.cleanupAndQuit()
-
- def cleanupAndQuit(self):
- """
- cleans state before shutting down app.
- """
- # save geometry for restoring
- settings = QtCore.QSettings()
- geom_key = "DebugGeometry" if self.debugmode else "Geometry"
- settings.setValue(geom_key, self.saveGeometry())
-
- # TODO:make sure to shutdown all child process / threads
- # in conductor
- # XXX send signal instead?
- logger.info('Shutting down')
- self.conductor.disconnect(shutdown=True)
- logger.info('Exiting. Bye.')
- QtGui.qApp.quit()
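# Editor's note (not part of the original diff): a small, hypothetical sketch of the
# QSettings-based geometry persistence used by LeapWindow.__init__ and
# MainWindowMixin.cleanupAndQuit() above. The organization/application names passed
# to QSettings here are placeholders, not the ones the real client uses.

import sys

import sip
sip.setapi('QVariant', 2)

from PyQt4 import QtCore, QtGui


def save_geometry(window, debugmode=False):
    settings = QtCore.QSettings("example-org", "example-app")
    geom_key = "DebugGeometry" if debugmode else "Geometry"
    settings.setValue(geom_key, window.saveGeometry())


def restore_geometry(window, debugmode=False):
    settings = QtCore.QSettings("example-org", "example-app")
    geom = settings.value("DebugGeometry" if debugmode else "Geometry")
    if geom:
        window.restoreGeometry(geom)


if __name__ == "__main__":
    app = QtGui.QApplication(sys.argv)
    win = QtGui.QMainWindow()
    restore_geometry(win)   # restore last saved position/size, if any
    win.show()
    save_geometry(win)      # in the real app this happens on shutdown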
diff --git a/src/leap/baseapp/log.py b/src/leap/baseapp/log.py
deleted file mode 100644
index 636e5bae..00000000
--- a/src/leap/baseapp/log.py
+++ /dev/null
@@ -1,69 +0,0 @@
-import logging
-
-from PyQt4 import QtGui
-from PyQt4 import QtCore
-
-vpnlogger = logging.getLogger('leap.openvpn')
-
-
-class LogPaneMixin(object):
- """
- a simple log pane
- that writes new lines as they come
- """
- EXCLUDES = ('MANAGEMENT',)
-
- def createLogBrowser(self):
- """
- creates Browser widget for displaying logs
- (in debug mode only).
- """
- self.loggerBox = QtGui.QGroupBox()
- logging_layout = QtGui.QVBoxLayout()
- self.logbrowser = QtGui.QTextBrowser()
-
- startStopButton = QtGui.QPushButton(self.tr("&Connect"))
- self.startStopButton = startStopButton
-
- logging_layout.addWidget(self.logbrowser)
- logging_layout.addWidget(self.startStopButton)
- self.loggerBox.setLayout(logging_layout)
-
- # status box
-
- self.statusBox = QtGui.QGroupBox()
- grid = QtGui.QGridLayout()
-
- self.updateTS = QtGui.QLabel('')
- self.status_label = QtGui.QLabel(self.tr('Disconnected'))
- self.ip_label = QtGui.QLabel('')
- self.remote_label = QtGui.QLabel('')
- self.remote_country = QtGui.QLabel('')
-
- tun_read_label = QtGui.QLabel("tun read")
- self.tun_read_bytes = QtGui.QLabel("0")
- tun_write_label = QtGui.QLabel("tun write")
- self.tun_write_bytes = QtGui.QLabel("0")
-
- grid.addWidget(self.updateTS, 0, 0)
- grid.addWidget(self.status_label, 0, 1)
- grid.addWidget(self.ip_label, 1, 0)
- grid.addWidget(self.remote_label, 1, 1)
- grid.addWidget(self.remote_country, 2, 1)
- grid.addWidget(tun_read_label, 3, 0)
- grid.addWidget(self.tun_read_bytes, 3, 1)
- grid.addWidget(tun_write_label, 4, 0)
- grid.addWidget(self.tun_write_bytes, 4, 1)
-
- self.statusBox.setLayout(grid)
-
- @QtCore.pyqtSlot(str)
- def onLoggerNewLine(self, line):
- """
- simple slot: writes new line to logger Pane.
- """
- msg = line[:-1]
- if self.debugmode and all(map(lambda w: w not in msg,
- LogPaneMixin.EXCLUDES)):
- self.logbrowser.append(msg)
- vpnlogger.info(msg)
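# Editor's note (not part of the original diff): a tiny, Qt-free illustration of the
# EXCLUDES filtering done in LogPaneMixin.onLoggerNewLine() above: a line is shown
# in the log pane only if none of the excluded words appear in it. should_display is
# a made-up helper name.

EXCLUDES = ('MANAGEMENT',)


def should_display(line, excludes=EXCLUDES):
    msg = line.rstrip('\n')
    return all(word not in msg for word in excludes)


assert should_display("SIGTERM received, exiting\n")
assert not should_display("MANAGEMENT: CMD 'status'\n")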
diff --git a/src/leap/baseapp/mainwindow.py b/src/leap/baseapp/mainwindow.py
deleted file mode 100644
index 91b0dc61..00000000
--- a/src/leap/baseapp/mainwindow.py
+++ /dev/null
@@ -1,191 +0,0 @@
-# vim: set fileencoding=utf-8 :
-#!/usr/bin/env python
-import logging
-
-import sip
-sip.setapi('QString', 2)
-sip.setapi('QVariant', 2)
-
-from PyQt4 import QtCore
-from PyQt4 import QtGui
-
-from leap.baseapp.eip import EIPConductorAppMixin
-from leap.baseapp.log import LogPaneMixin
-from leap.baseapp.systray import StatusAwareTrayIconMixin
-from leap.baseapp.network import NetworkCheckerAppMixin
-from leap.baseapp.leap_app import MainWindowMixin
-from leap.eip.checks import ProviderCertChecker
-from leap.gui.threads import FunThread
-
-logger = logging.getLogger(name=__name__)
-
-
-class LeapWindow(QtGui.QMainWindow,
- MainWindowMixin, EIPConductorAppMixin,
- StatusAwareTrayIconMixin,
- NetworkCheckerAppMixin,
- LogPaneMixin):
- """
- main window for the leap app.
- Initializes all of its base classes.
- We keep some signal initialization here
- that gets tricky otherwise.
- """
-
- # signals
-
- newLogLine = QtCore.pyqtSignal([str])
- mainappReady = QtCore.pyqtSignal([])
- initReady = QtCore.pyqtSignal([])
- networkError = QtCore.pyqtSignal([object])
- triggerEIPError = QtCore.pyqtSignal([object])
- start_eipconnection = QtCore.pyqtSignal([])
- shutdownSignal = QtCore.pyqtSignal([])
- initNetworkChecker = QtCore.pyqtSignal([])
-
- # this is status change got from openvpn management
- openvpnStatusChange = QtCore.pyqtSignal([object])
- # this is global eip status
- eipStatusChange = QtCore.pyqtSignal([str])
-
- def __init__(self, opts):
- logger.debug('init leap window')
- self.debugmode = getattr(opts, 'debug', False)
- super(LeapWindow, self).__init__()
- if self.debugmode:
- self.createLogBrowser()
-
- settings = QtCore.QSettings()
- self.provider_domain = settings.value("provider_domain", None)
- self.username = settings.value("username", None)
-
- logger.debug('provider: %s', self.provider_domain)
- logger.debug('username: %s', self.username)
-
- provider = self.provider_domain
- EIPConductorAppMixin.__init__(
- self, opts=opts, provider=provider)
- StatusAwareTrayIconMixin.__init__(self)
-
- # XXX network checker should probably not
- # trigger run_checks on init... but wait
- # for ready signal instead...
- NetworkCheckerAppMixin.__init__(self, provider=provider)
- MainWindowMixin.__init__(self)
-
- geom_key = "DebugGeometry" if self.debugmode else "Geometry"
- geom = settings.value(geom_key)
- if geom:
- self.restoreGeometry(geom)
-
- # XXX check for wizard
- self.wizard_done = settings.value("FirstRunWizardDone")
-
- self.initchecks = FunThread(self.run_eip_checks)
-
- # bind signals
- self.initchecks.finished.connect(
- lambda: logger.debug('Initial checks thread finished'))
- self.trayIcon.activated.connect(self.iconActivated)
- self.newLogLine.connect(
- lambda line: self.onLoggerNewLine(line))
- self.timer.timeout.connect(
- lambda: self.onTimerTick())
- self.networkError.connect(
- lambda exc: self.onNetworkError(exc))
- self.triggerEIPError.connect(
- lambda exc: self.onEIPError(exc))
-
- if self.debugmode:
- self.startStopButton.clicked.connect(
- lambda: self.start_or_stopVPN())
- self.start_eipconnection.connect(
- self.do_start_eipconnection)
- self.shutdownSignal.connect(
- self.cleanupAndQuit)
- self.initNetworkChecker.connect(
- lambda: self.init_network_checker(self.conductor.provider))
-
- # status change.
- # TODO unify
- self.openvpnStatusChange.connect(
- lambda status: self.onOpenVPNStatusChange(status))
- self.eipStatusChange.connect(
- lambda newstatus: self.onEIPConnStatusChange(newstatus))
- self.eipStatusChange.connect(
- lambda newstatus: self.toggleEIPAct())
-
- # do first run wizard and init signals
- self.mainappReady.connect(self.do_first_run_wizard_check)
- self.initReady.connect(self.runchecks_and_eipconnect)
-
- # ... all ready. go!
- # connected to do_first_run_wizard_check
- self.mainappReady.emit()
-
- def do_first_run_wizard_check(self):
- """
- checks whether the first run wizard needs to be run;
- launches it if needed
- and emits the initReady signal if not.
- """
-
- logger.debug('first run wizard check...')
- need_wizard = False
-
- # do checks (can overlap if wizard was interrupted)
- if not self.wizard_done:
- need_wizard = True
-
- if not self.provider_domain:
- need_wizard = True
- else:
- pcertchecker = ProviderCertChecker(domain=self.provider_domain)
- if not pcertchecker.is_cert_valid(do_raise=False):
- logger.warning('missing valid client cert. need wizard')
- need_wizard = True
-
- # launch wizard if needed
- if need_wizard:
- logger.debug('running first run wizard')
- self.launch_first_run_wizard()
- else: # no wizard needed
- self.initReady.emit()
-
- def launch_first_run_wizard(self):
- """
- launches wizard and blocks
- """
- from leap.gui.firstrun.wizard import FirstRunWizard
- wizard = FirstRunWizard(
- self.conductor,
- parent=self,
- username=self.username,
- start_eipconnection_signal=self.start_eipconnection,
- eip_statuschange_signal=self.eipStatusChange,
- quitcallback=self.onWizardCancel)
- wizard.show()
-
- def onWizardCancel(self):
- if not self.wizard_done:
- logger.debug(
- 'clicked on Cancel during first '
- 'run wizard. shutting down')
- self.cleanupAndQuit()
-
- def runchecks_and_eipconnect(self):
- """
- shows icon and runs init checks
- """
- self.show_systray_icon()
- self.initchecks.begin()
-
- def do_start_eipconnection(self):
- """
- shows icon and inits the eip connection;
- called from the end of the wizard
- """
- self.show_systray_icon()
- # this will setup the command
- self.conductor.run_openvpn_checks()
- self.start_or_stopVPN()
diff --git a/src/leap/baseapp/network.py b/src/leap/baseapp/network.py
deleted file mode 100644
index dc5182a4..00000000
--- a/src/leap/baseapp/network.py
+++ /dev/null
@@ -1,63 +0,0 @@
-from __future__ import print_function
-
-import logging
-
-logger = logging.getLogger(name=__name__)
-
-from PyQt4 import QtCore
-
-from leap.baseapp.dialogs import ErrorDialog
-from leap.base.network import NetworkCheckerThread
-
-from leap.util.misc import null_check
-
-
-class NetworkCheckerAppMixin(object):
- """
- initializes an instance of the Network Checker,
- which gathers errors and passes them on.
- """
- ERR_NETERR = False
-
- def __init__(self, *args, **kwargs):
- provider = kwargs.pop('provider', None)
- self.network_checker = None
- if provider:
- self.init_network_checker(provider)
-
- def init_network_checker(self, provider):
- null_check(provider, "provider_domain")
- if not self.network_checker:
- self.network_checker = NetworkCheckerThread(
- error_cb=self.networkError.emit,
- debug=self.debugmode,
- provider=provider)
- self.network_checker.start()
-
- @QtCore.pyqtSlot(object)
- def runNetworkChecks(self):
- logger.debug('running checks (from NetworkChecker Mixin slot)')
- self.network_checker.run_checks()
-
- @QtCore.pyqtSlot(object)
- def onNetworkError(self, exc):
- """
- slot that receives a network exception
- and raises a user error message
- """
- # FIXME this should not HANDLE anything after
- # the network check thread has been stopped.
-
- logger.debug('handling network exception')
- if not self.ERR_NETERR:
- self.ERR_NETERR = True
-
- logger.error(exc.message)
- dialog = ErrorDialog(parent=self)
- if exc.critical:
- dialog.criticalMessage(exc.usermessage, "network error")
- else:
- dialog.warningMessage(exc.usermessage, "network error")
-
- self.start_or_stopVPN()
- self.network_checker.stop()
diff --git a/src/leap/baseapp/permcheck.py b/src/leap/baseapp/permcheck.py
deleted file mode 100644
index 6b74cb6e..00000000
--- a/src/leap/baseapp/permcheck.py
+++ /dev/null
@@ -1,17 +0,0 @@
-import commands
-import os
-
-from leap.util.fileutil import which
-
-
-def is_pkexec_in_system():
- pkexec_path = which('pkexec')
- if not pkexec_path:
- return False
- return os.access(pkexec_path, os.X_OK)
-
-
-def is_auth_agent_running():
- return bool(
- commands.getoutput(
- 'ps aux | grep polkit-[g]nome-authentication-agent-1'))
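# Editor's note (not part of the original diff): a hedged, self-contained variant of
# is_pkexec_in_system() above, substituting distutils.spawn.find_executable for
# leap.util.fileutil.which (which is defined elsewhere in the tree and not shown in
# this diff).

import os
from distutils.spawn import find_executable


def pkexec_available():
    pkexec_path = find_executable('pkexec')
    if not pkexec_path:
        return False
    return os.access(pkexec_path, os.X_OK)


if __name__ == "__main__":
    print pkexec_available()  # Python 2 print statement, matching the codebase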
diff --git a/src/leap/baseapp/systray.py b/src/leap/baseapp/systray.py
deleted file mode 100644
index 77eb3fe9..00000000
--- a/src/leap/baseapp/systray.py
+++ /dev/null
@@ -1,268 +0,0 @@
-import logging
-import sys
-
-import sip
-sip.setapi('QString', 2)
-sip.setapi('QVariant', 2)
-
-from PyQt4 import QtCore
-from PyQt4 import QtGui
-
-from leap import __branding as BRANDING
-from leap import __version__ as VERSION
-
-from leap.gui import mainwindow_rc
-
-logger = logging.getLogger(__name__)
-
-
-class StatusAwareTrayIconMixin(object):
- """
- a mix of several functions needed
- to create a systray and make it
- get updated from conductor status
- polling.
- """
- states = {
- "disconnected": 0,
- "connecting": 1,
- "connected": 2}
-
- iconpath = {
- "disconnected": ':/images/conn_error.png',
- "connecting": ':/images/conn_connecting.png',
- "connected": ':/images/conn_connected.png'}
-
- Icons = {
- 'disconnected': lambda self: QtGui.QIcon(
- self.iconpath['disconnected']),
- 'connecting': lambda self: QtGui.QIcon(
- self.iconpath['connecting']),
- 'connected': lambda self: QtGui.QIcon(
- self.iconpath['connected'])
- }
-
- def __init__(self, *args, **kwargs):
- self.createIconGroupBox()
- self.createActions()
- self.createTrayIcon()
-
- # not sure if this really belongs here, but...
- self.timer = QtCore.QTimer()
-
- def show_systray_icon(self):
- #logger.debug('showing tray icon................')
- self.trayIcon.show()
-
- def createIconGroupBox(self):
- """
- dummy icongroupbox
- (to be removed from here -- reference only)
- """
- con_widgets = {
- 'disconnected': QtGui.QLabel(),
- 'connecting': QtGui.QLabel(),
- 'connected': QtGui.QLabel(),
- }
- con_widgets['disconnected'].setPixmap(
- QtGui.QPixmap(
- self.iconpath['disconnected']))
- con_widgets['connecting'].setPixmap(
- QtGui.QPixmap(
- self.iconpath['connecting']))
- con_widgets['connected'].setPixmap(
- QtGui.QPixmap(
- self.iconpath['connected'])),
- self.ConnectionWidgets = con_widgets
-
- self.statusIconBox = QtGui.QGroupBox(
- self.tr("EIP Connection Status"))
- statusIconLayout = QtGui.QHBoxLayout()
- statusIconLayout.addWidget(self.ConnectionWidgets['disconnected'])
- statusIconLayout.addWidget(self.ConnectionWidgets['connecting'])
- statusIconLayout.addWidget(self.ConnectionWidgets['connected'])
- statusIconLayout.itemAt(1).widget().hide()
- statusIconLayout.itemAt(2).widget().hide()
-
- self.leapConnStatus = QtGui.QLabel(
- self.tr("<b>disconnected</b>"))
- statusIconLayout.addWidget(self.leapConnStatus)
-
- self.statusIconBox.setLayout(statusIconLayout)
-
- def createTrayIcon(self):
- """
- creates the tray icon
- """
- self.trayIconMenu = QtGui.QMenu(self)
-
- self.trayIconMenu.addAction(self.connAct)
- self.trayIconMenu.addSeparator()
- self.trayIconMenu.addAction(self.detailsAct)
- self.trayIconMenu.addSeparator()
- self.trayIconMenu.addAction(self.aboutAct)
- # we should get this hidden inside the "about" dialog
- # (as a little button maybe)
- #self.trayIconMenu.addAction(self.aboutQtAct)
- self.trayIconMenu.addSeparator()
- self.trayIconMenu.addAction(self.quitAction)
-
- self.trayIcon = QtGui.QSystemTrayIcon(self)
- self.setIcon('disconnected')
- self.trayIcon.setContextMenu(self.trayIconMenu)
-
- #self.trayIconMenu.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
- #self.trayIconMenu.customContextMenuRequested.connect(
- #self.on_context_menu)
-
- #def bad(self):
- #logger.error('this should not be called')
-
- def createActions(self):
- """
- creates actions to be bound to the tray icon
- """
- # XXX change action name on (dis)connect
- self.connAct = QtGui.QAction(
- self.tr("Encryption ON turn &off"),
- self,
- triggered=lambda: self.start_or_stopVPN())
-
- self.detailsAct = QtGui.QAction(
- self.tr("&Details..."),
- self,
- triggered=self.detailsWin)
- self.aboutAct = QtGui.QAction(
- self.tr("&About"), self,
- triggered=self.about)
- self.aboutQtAct = QtGui.QAction(
- self.tr("About Q&t"), self,
- triggered=QtGui.qApp.aboutQt)
- self.quitAction = QtGui.QAction(
- self.tr("&Quit"), self,
- triggered=self.cleanupAndQuit)
-
- def toggleEIPAct(self):
- # this is too simple by now.
- # XXX get STATUS CONSTANTS INSTEAD
-
- icon_status = self.conductor.get_icon_name()
- if icon_status == "connected":
- self.connAct.setEnabled(True)
- self.connAct.setText(
- self.tr('Encryption ON turn o&ff'))
- return
- if icon_status == "disconnected":
- self.connAct.setEnabled(True)
- self.connAct.setText(
- self.tr('Encryption OFF turn &on'))
- return
- if icon_status == "connecting":
- self.connAct.setDisabled(True)
- self.connAct.setText(self.tr('connecting...'))
- return
-
- def detailsWin(self):
- visible = self.isVisible()
- if visible:
- self.hide()
- else:
- self.show()
- if sys.platform == "darwin":
- self.raise_()
-
- def about(self):
- # move to widget
- flavor = BRANDING.get('short_name', None)
- content = self.tr(
- ("LEAP client<br>"
- "(version <b>%s</b>)<br>" % VERSION))
- if flavor:
- content = content + ('<br>Flavor: <i>%s</i><br>' % flavor)
- content = content + (
- "<br><a href='https://leap.se/'>"
- "https://leap.se</a>")
- QtGui.QMessageBox.about(self, self.tr("About"), content)
-
- def setConnWidget(self, icon_name):
- oldlayout = self.statusIconBox.layout()
-
- for i in range(3):
- oldlayout.itemAt(i).widget().hide()
- new = self.states[icon_name]
- oldlayout.itemAt(new).widget().show()
-
- def setIcon(self, name):
- icon_fun = self.Icons.get(name)
- if icon_fun and callable(icon_fun):
- icon = icon_fun(self)
- self.trayIcon.setIcon(icon)
-
- def getIcon(self, icon_name):
- return self.states.get(icon_name, None)
-
- def setIconToolTip(self):
- """
- get readable status and place it on systray tooltip
- """
- status = self.conductor.status.get_readable_status()
- self.trayIcon.setToolTip(status)
-
- def iconActivated(self, reason):
- """
- handles left click and left double click,
- showing the trayicon menu
- """
- if reason in (QtGui.QSystemTrayIcon.Trigger,
- QtGui.QSystemTrayIcon.DoubleClick):
- context_menu = self.trayIcon.contextMenu()
- # for some reason, context_menu.show()
- # is failing in a way beyond my understanding.
- # (not working the first time it's clicked).
- # this works however.
- # XXX in osx it shows some glitches.
- context_menu.exec_(self.trayIcon.geometry().center())
-
- @QtCore.pyqtSlot()
- def onTimerTick(self):
- self.statusUpdate()
-
- @QtCore.pyqtSlot(object)
- def onOpenVPNStatusChange(self, status):
- """
- updates icon, according to the openvpn status change.
- """
- icon_name = self.conductor.get_icon_name()
- if not icon_name:
- return
-
- # XXX refactor. Use QStateMachine
-
- if icon_name in ("disconnected", "connected"):
- self.eipStatusChange.emit(icon_name)
-
- if icon_name in ("connecting"):
- # let's see how it matches
- leap_status_name = self.conductor.get_leap_status()
- self.eipStatusChange.emit(leap_status_name)
-
- if icon_name == "connected":
- # When we change to "connected", we launch
- # the network checker.
- self.initNetworkChecker.emit()
-
- self.setIcon(icon_name)
- # change connection pixmap widget
- self.setConnWidget(icon_name)
-
- @QtCore.pyqtSlot(str)
- def onEIPConnStatusChange(self, newstatus):
- """
- slot for EIP status changes
- not to be confused with onOpenVPNStatusChange.
- this only updates the non-debug LEAP Status line
- next to the connection icon.
- """
- # XXX move bold to style sheet
- self.leapConnStatus.setText(
- "<b>%s</b>" % newstatus)
diff --git a/src/leap/certs/__init__.py b/src/leap/certs/__init__.py
deleted file mode 100644
index c4d009b1..00000000
--- a/src/leap/certs/__init__.py
+++ /dev/null
@@ -1,7 +0,0 @@
-import os
-
-_where = os.path.split(__file__)[0]
-
-
-def where(filename):
- return os.path.join(_where, filename)
diff --git a/src/leap/crypto/__init__.py b/src/leap/crypto/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/src/leap/crypto/__init__.py
+++ /dev/null
diff --git a/src/leap/crypto/certs.py b/src/leap/crypto/certs.py
deleted file mode 100644
index cbb5725a..00000000
--- a/src/leap/crypto/certs.py
+++ /dev/null
@@ -1,112 +0,0 @@
-import logging
-import os
-from StringIO import StringIO
-import ssl
-import time
-
-from dateutil.parser import parse
-from OpenSSL import crypto
-
-from leap.util.misc import null_check
-
-logger = logging.getLogger(__name__)
-
-
-class BadCertError(Exception):
- """
- raised for malformed certs
- """
-
-
-class NoCertError(Exception):
- """
- raised for cert not found in given path
- """
-
-
-def get_https_cert_from_domain(domain, port=443):
- """
- @param domain: a domain name to get a certificate from.
- """
- cert = ssl.get_server_certificate((domain, port))
- x509 = crypto.load_certificate(crypto.FILETYPE_PEM, cert)
- return x509
-
-
-def get_cert_from_file(_file):
- null_check(_file, "pem file")
- if isinstance(_file, (str, unicode)):
- if not os.path.isfile(_file):
- raise NoCertError
- with open(_file) as f:
- cert = f.read()
- else:
- cert = _file.read()
- x509 = crypto.load_certificate(crypto.FILETYPE_PEM, cert)
- return x509
-
-
-def get_pkey_from_file(_file):
- getkey = lambda f: crypto.load_privatekey(
- crypto.FILETYPE_PEM, f.read())
-
- if isinstance(_file, str):
- with open(_file) as f:
- key = getkey(f)
- else:
- key = getkey(_file)
- return key
-
-
-def can_load_cert_and_pkey(string):
- """
- loads certificate and private key from
- a buffer
- """
- try:
- f = StringIO(string)
- cert = get_cert_from_file(f)
-
- f = StringIO(string)
- key = get_pkey_from_file(f)
-
- null_check(cert, 'certificate')
- null_check(key, 'private key')
- except Exception as exc:
- logger.error(type(exc), exc.message)
- raise BadCertError
- else:
- return True
-
-
-def get_cert_fingerprint(domain=None, port=443, filepath=None,
- hash_type="SHA256", sep=":"):
- """
- @param domain: a domain name to get a fingerprint from
- @type domain: str
- @param filepath: path to a file containing a PEM file
- @type filepath: str
- @param hash_type: the hash function to be used in the fingerprint.
- must be one of SHA1, SHA224, SHA256, SHA384, SHA512
- @type hash_type: str
- @rparam: hex_fpr, a hexadecimal representation of a bytestring
- containing the fingerprint.
- @rtype: string
- """
- if domain:
- cert = get_https_cert_from_domain(domain, port=port)
- if filepath:
- cert = get_cert_from_file(filepath)
- hex_fpr = cert.digest(hash_type)
- return hex_fpr
-
-
-def get_time_boundaries(certfile):
- cert = get_cert_from_file(certfile)
- null_check(cert, 'certificate')
-
- fromts, tots = (cert.get_notBefore(), cert.get_notAfter())
- from_, to_ = map(
- lambda ts: time.gmtime(time.mktime(parse(ts).timetuple())),
- (fromts, tots))
- return from_, to_
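# Editor's note (not part of the original diff): a hedged, self-contained sketch of
# the fingerprint helpers above, using only the stdlib ssl module plus pyOpenSSL.
# X509.digest() already returns a ':'-separated hex string, so no extra formatting
# is needed; https_cert_fingerprint is a made-up name.

import ssl

from OpenSSL import crypto


def https_cert_fingerprint(domain, port=443, hash_type="sha256"):
    pem = ssl.get_server_certificate((domain, port))
    x509 = crypto.load_certificate(crypto.FILETYPE_PEM, pem)
    return x509.digest(hash_type)


if __name__ == "__main__":
    print https_cert_fingerprint("leap.se")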
diff --git a/src/leap/crypto/certs_gnutls.py b/src/leap/crypto/certs_gnutls.py
deleted file mode 100644
index 20c0e043..00000000
--- a/src/leap/crypto/certs_gnutls.py
+++ /dev/null
@@ -1,112 +0,0 @@
-'''
-We're using PyOpenSSL now
-
-import ctypes
-from StringIO import StringIO
-import socket
-
-import gnutls.connection
-import gnutls.crypto
-import gnutls.library
-
-from leap.util.misc import null_check
-
-
-class BadCertError(Exception):
- """raised for malformed certs"""
-
-
-def get_https_cert_from_domain(domain):
- """
- @param domain: a domain name to get a certificate from.
- """
- sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- cred = gnutls.connection.X509Credentials()
-
- session = gnutls.connection.ClientSession(sock, cred)
- session.connect((domain, 443))
- session.handshake()
- cert = session.peer_certificate
- return cert
-
-
-def get_cert_from_file(_file):
- getcert = lambda f: gnutls.crypto.X509Certificate(f.read())
- if isinstance(_file, str):
- with open(_file) as f:
- cert = getcert(f)
- else:
- cert = getcert(_file)
- return cert
-
-
-def get_pkey_from_file(_file):
- getkey = lambda f: gnutls.crypto.X509PrivateKey(f.read())
- if isinstance(_file, str):
- with open(_file) as f:
- key = getkey(f)
- else:
- key = getkey(_file)
- return key
-
-
-def can_load_cert_and_pkey(string):
- try:
- f = StringIO(string)
- cert = get_cert_from_file(f)
-
- f = StringIO(string)
- key = get_pkey_from_file(f)
-
- null_check(cert, 'certificate')
- null_check(key, 'private key')
- except:
- # XXX catch GNUTLSError?
- raise BadCertError
- else:
- return True
-
-def get_cert_fingerprint(domain=None, filepath=None,
- hash_type="SHA256", sep=":"):
- """
- @param domain: a domain name to get a fingerprint from
- @type domain: str
- @param filepath: path to a file containing a PEM file
- @type filepath: str
- @param hash_type: the hash function to be used in the fingerprint.
- must be one of SHA1, SHA224, SHA256, SHA384, SHA512
- @type hash_type: str
- @rparam: hex_fpr, a hexadecimal representation of a bytestring
- containing the fingerprint.
- @rtype: string
- """
- if domain:
- cert = get_https_cert_from_domain(domain)
- if filepath:
- cert = get_cert_from_file(filepath)
-
- _buffer = ctypes.create_string_buffer(64)
- buffer_length = ctypes.c_size_t(64)
-
- SUPPORTED_DIGEST_FUN = ("SHA1", "SHA224", "SHA256", "SHA384", "SHA512")
- if hash_type in SUPPORTED_DIGEST_FUN:
- digestfunction = getattr(
- gnutls.library.constants,
- "GNUTLS_DIG_%s" % hash_type)
- else:
- # XXX improperlyconfigured or something
- raise Exception("digest function not supported")
-
- gnutls.library.functions.gnutls_x509_crt_get_fingerprint(
- cert._c_object, digestfunction,
- ctypes.byref(_buffer), ctypes.byref(buffer_length))
-
- # deinit
- #server_cert._X509Certificate__deinit(server_cert._c_object)
- # needed? is segfaulting
-
- fpr = ctypes.string_at(_buffer, buffer_length.value)
- hex_fpr = sep.join(u"%02X" % ord(char) for char in fpr)
-
- return hex_fpr
-'''
diff --git a/src/leap/crypto/leapkeyring.py b/src/leap/crypto/leapkeyring.py
deleted file mode 100644
index c241d0bc..00000000
--- a/src/leap/crypto/leapkeyring.py
+++ /dev/null
@@ -1,70 +0,0 @@
-import keyring
-
-from leap.base.config import get_config_file
-
-#############
-# Disclaimer
-#############
-# This currently is not a keyring, it's more like a joke.
-# No, seriously.
-# We're affected by this **bug**
-
-# https://bitbucket.org/kang/python-keyring-lib/
-# issue/65/dbusexception-method-opensession-with
-
-# so using the gnome keyring does not seem feasible right now.
-# I thought this was the next best option to store secrets in plain sight.
-
-# in the future we should move to use the gnome/kde/macosx/win keyrings.
-
-
-class LeapCryptedFileKeyring(keyring.backend.CryptedFileKeyring):
-
- filename = ".secrets"
-
- @property
- def file_path(self):
- return get_config_file(self.filename)
-
- def __init__(self, seed=None):
- self.seed = seed
-
- def _get_new_password(self):
- # XXX every time this method is called,
- # $deity kills a kitten.
- return "secret%s" % self.seed
-
- def _init_file(self):
- self.keyring_key = self._get_new_password()
- self.set_password('keyring_setting', 'pass_ref', 'pass_ref_value')
-
- def _unlock(self):
- self.keyring_key = self._get_new_password()
- print 'keyring key ', self.keyring_key
- try:
- ref_pw = self.get_password(
- 'keyring_setting',
- 'pass_ref')
- print 'ref pw ', ref_pw
- assert ref_pw == "pass_ref_value"
- except AssertionError:
- self._lock()
- raise ValueError('Incorrect password')
-
-
-def leap_set_password(key, value, seed="xxx"):
- key, value = map(unicode, (key, value))
- keyring.set_keyring(LeapCryptedFileKeyring(seed=seed))
- keyring.set_password('leap', key, value)
-
-
-def leap_get_password(key, seed="xxx"):
- keyring.set_keyring(LeapCryptedFileKeyring(seed=seed))
- #import ipdb;ipdb.set_trace()
- return keyring.get_password('leap', key)
-
-
-if __name__ == "__main__":
- leap_set_password('test', 'bar')
- passwd = leap_get_password('test')
- assert passwd == 'bar'
diff --git a/src/leap/crypto/tests/__init__.py b/src/leap/crypto/tests/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/src/leap/crypto/tests/__init__.py
+++ /dev/null
diff --git a/src/leap/crypto/tests/test_certs.py b/src/leap/crypto/tests/test_certs.py
deleted file mode 100644
index e476b630..00000000
--- a/src/leap/crypto/tests/test_certs.py
+++ /dev/null
@@ -1,22 +0,0 @@
-import unittest
-
-from leap.testing.https_server import where
-from leap.crypto import certs
-
-
-class CertTestCase(unittest.TestCase):
-
- def test_can_load_client_and_pkey(self):
- with open(where('leaptestscert.pem')) as cf:
- cs = cf.read()
- with open(where('leaptestskey.pem')) as kf:
- ks = kf.read()
- certs.can_load_cert_and_pkey(cs + ks)
-
- with self.assertRaises(certs.BadCertError):
- # screw header
- certs.can_load_cert_and_pkey(cs.replace("BEGIN", "BEGINN") + ks)
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/src/leap/eip/__init__.py b/src/leap/eip/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/src/leap/eip/__init__.py
+++ /dev/null
diff --git a/src/leap/eip/checks.py b/src/leap/eip/checks.py
deleted file mode 100644
index 9a34a428..00000000
--- a/src/leap/eip/checks.py
+++ /dev/null
@@ -1,537 +0,0 @@
-import logging
-import time
-import os
-import sys
-
-import requests
-
-from leap import __branding as BRANDING
-from leap import certs as leapcerts
-from leap.base.auth import srpauth_protected, magick_srpauth
-from leap.base import config as baseconfig
-from leap.base import constants as baseconstants
-from leap.base import providers
-from leap.crypto import certs
-from leap.eip import config as eipconfig
-from leap.eip import constants as eipconstants
-from leap.eip import exceptions as eipexceptions
-from leap.eip import specs as eipspecs
-from leap.util.certs import get_mac_cabundle
-from leap.util.fileutil import mkdir_p
-from leap.util.web import get_https_domain_and_port
-
-logger = logging.getLogger(name=__name__)
-
-"""
-ProviderCertChecker
--------------------
-Checks on certificates. To be moved to base.
-docs TBD
-
-EIPConfigChecker
-----------
-It is used from the eip conductor (an instance of EIPConnection that is
-managed from the QtApp), running the `run_all` method before trying to call
-`connect` or any other of the state-changing methods.
-
-It checks that the needed files are provided or can be discovered over the
-net. Many of these tests are not specific to the EIP module, and could be split
-into base.tests to be invoked by the base leap init routines.
-However, I'm testing them all together for the sake of having the whole unit
-reachable and testable as a whole.
-
-"""
-
-
-def get_branding_ca_cert(domain):
- # deprecated
- ca_file = BRANDING.get('provider_ca_file')
- if ca_file:
- return leapcerts.where(ca_file)
-
-
-class ProviderCertChecker(object):
- """
- Several checks needed for getting
- client certs and checking tls connection
- with provider.
- """
- def __init__(self, fetcher=requests,
- domain=None):
-
- self.fetcher = fetcher
- self.domain = domain
- #XXX needs some kind of autoinit
- #right now we set by hand
- #by loading and reading provider config
- self.apidomain = None
- self.cacert = eipspecs.provider_ca_path(domain)
-
- def run_all(
- self, checker=None,
- skip_download=False, skip_verify=False):
-
- if not checker:
- checker = self
-
- do_verify = not skip_verify
- logger.debug('do_verify: %s', do_verify)
- # checker.download_ca_cert()
-
- # For MVS+
- # checker.download_ca_signature()
- # checker.get_ca_signatures()
- # checker.is_there_trust_path()
-
- # For MVS
- checker.is_there_provider_ca()
-
- checker.is_https_working(verify=do_verify, autocacert=False)
- checker.check_new_cert_needed(verify=do_verify)
-
- def download_ca_cert(self, uri=None, verify=True):
- req = self.fetcher.get(uri, verify=verify)
- req.raise_for_status()
-
- # should check domain exists
- capath = self._get_ca_cert_path(self.domain)
- with open(capath, 'w') as f:
- f.write(req.content)
-
- def check_ca_cert_fingerprint(
- self, hash_type="SHA256",
- fingerprint=None):
- """
- compares the fingerprint in
- the ca cert with a string
- we are passed.
- Returns True if they are equal, False if not.
- @param hash_type: digest function
- @type hash_type: str
- @param fingerprint: the fingerprint to compare with.
- @type fingerprint: str (with : separator)
- @rtype bool
- """
- ca_cert_path = self.ca_cert_path
- ca_cert_fpr = certs.get_cert_fingerprint(
- filepath=ca_cert_path)
- return ca_cert_fpr == fingerprint
-
- def verify_api_https(self, uri):
- assert uri.startswith('https://')
- cacert = self.ca_cert_path
- verify = cacert and cacert or True
- req = self.fetcher.get(uri, verify=verify)
- req.raise_for_status()
- return True
-
- def download_ca_signature(self):
- # MVS+
- raise NotImplementedError
-
- def get_ca_signatures(self):
- # MVS+
- raise NotImplementedError
-
- def is_there_trust_path(self):
- # MVS+
- raise NotImplementedError
-
- def is_there_provider_ca(self):
- if not self.cacert:
- return False
- cacert_exists = os.path.isfile(self.cacert)
- if cacert_exists:
- logger.debug('True')
- return True
- logger.debug('False!')
- return False
-
- def is_https_working(
- self, uri=None, verify=True,
- autocacert=False):
- if uri is None:
- uri = self._get_root_uri()
- # XXX raise InsecureURI or something better
- try:
- assert uri.startswith('https')
- except AssertionError:
- raise AssertionError(
- "uri passed should start with https")
- if autocacert and verify is True and self.cacert is not None:
- logger.debug('verify cert: %s', self.cacert)
- verify = self.cacert
- if sys.platform == "darwin":
- verify = get_mac_cabundle()
- logger.debug('checking https connection')
- logger.debug('uri: %s (verify:%s)', uri, verify)
-
- try:
- self.fetcher.get(uri, verify=verify)
-
- except requests.exceptions.SSLError as exc:
- raise eipexceptions.HttpsBadCertError
-
- except requests.exceptions.ConnectionError:
- logger.error('ConnectionError')
- raise eipexceptions.HttpsNotSupported
-
- else:
- return True
-
- def check_new_cert_needed(self, skip_download=False, verify=True):
- # XXX add autocacert
- if not self.is_cert_valid(do_raise=False):
- logger.debug('cert needed: true')
- self.download_new_client_cert(
- skip_download=skip_download,
- verify=verify)
- return True
- logger.debug('cert needed: false')
- return False
-
- def download_new_client_cert(self, uri=None, verify=True,
- skip_download=False,
- credentials=None):
- logger.debug('download new client cert')
- if skip_download:
- return True
- if uri is None:
- uri = self._get_client_cert_uri()
- # XXX raise InsecureURI or something better
- #assert uri.startswith('https')
-
- if verify is True and self.cacert is not None:
- verify = self.cacert
- logger.debug('verify = %s', verify)
-
- fgetfn = self.fetcher.get
-
- if credentials:
- user, passwd = credentials
- logger.debug('apidomain = %s', self.apidomain)
-
- @srpauth_protected(user, passwd,
- server="https://%s" % self.apidomain,
- verify=verify)
- def getfn(*args, **kwargs):
- return fgetfn(*args, **kwargs)
-
- else:
- # XXX FIXME fix decorated args
- @magick_srpauth(verify)
- def getfn(*args, **kwargs):
- return fgetfn(*args, **kwargs)
- try:
-
- req = getfn(uri, verify=verify)
- req.raise_for_status()
-
- except requests.exceptions.SSLError:
- logger.warning('SSLError while fetching cert. '
- 'Look below for stack trace.')
- # XXX raise better exception
- return self.fail("SSLError")
- except Exception as exc:
- return self.fail(exc.message)
-
- try:
- logger.debug('validating cert...')
- pemfile_content = req.content
- valid = self.is_valid_pemfile(pemfile_content)
- if not valid:
- logger.warning('invalid cert')
- return False
- cert_path = self._get_client_cert_path()
- self.write_cert(pemfile_content, to=cert_path)
- except:
- logger.warning('Error while validating cert')
- raise
- return True
-
- def is_cert_valid(self, cert_path=None, do_raise=True):
- exists = lambda: self.is_certificate_exists()
- valid_pemfile = lambda: self.is_valid_pemfile()
- not_expired = lambda: self.is_cert_not_expired()
-
- valid = exists() and valid_pemfile() and not_expired()
- if not valid:
- if do_raise:
- raise Exception('missing valid cert')
- else:
- return False
- return True
-
- def is_certificate_exists(self, certfile=None):
- if certfile is None:
- certfile = self._get_client_cert_path()
- return os.path.isfile(certfile)
-
- def is_cert_not_expired(self, certfile=None, now=time.gmtime):
- if certfile is None:
- certfile = self._get_client_cert_path()
- from_, to_ = certs.get_time_boundaries(certfile)
-
- return from_ < now() < to_
-
- def is_valid_pemfile(self, cert_s=None):
- """
- checks that the passed string
- is a valid pem certificate
- @param cert_s: string containing pem content
- @type cert_s: string
- @rtype: bool
- """
- if cert_s is None:
- certfile = self._get_client_cert_path()
- with open(certfile) as cf:
- cert_s = cf.read()
- try:
- valid = certs.can_load_cert_and_pkey(cert_s)
- except certs.BadCertError:
- logger.warning("Not valid pemfile")
- valid = False
- return valid
-
- @property
- def ca_cert_path(self):
- return self._get_ca_cert_path(self.domain)
-
- def _get_root_uri(self):
- return u"https://%s/" % self.domain
-
- def _get_client_cert_uri(self):
- return "https://%s/1/cert" % self.apidomain
-
- def _get_client_cert_path(self):
- return eipspecs.client_cert_path(domain=self.domain)
-
- def _get_ca_cert_path(self, domain):
- # XXX this folder path will be broken for win
- # and this should be moved to eipspecs.ca_path
-
- # XXX use baseconfig.get_provider_path(folder=Foo)
- # !!!
-
- capath = baseconfig.get_config_file(
- 'cacert.pem',
- folder='providers/%s/keys/ca' % domain)
- folder, fname = os.path.split(capath)
- if not os.path.isdir(folder):
- mkdir_p(folder)
- return capath
-
- def write_cert(self, pemfile_content, to=None):
- folder, filename = os.path.split(to)
- if not os.path.isdir(folder):
- mkdir_p(folder)
- with open(to, 'w') as cert_f:
- cert_f.write(pemfile_content)
-
- def set_api_domain(self, domain):
- self.apidomain = domain
-
-
-class EIPConfigChecker(object):
- """
- Several checks needed
- to ensure a EIPConnection
- can be sucessfully established.
- use run_all to run all checks.
- """
-
- def __init__(self, fetcher=requests, domain=None):
- # we do not want to accept too many
- # arguments on init.
- # we want tests
- # to be explicitly run.
-
- self.fetcher = fetcher
-
- # if not domain, get from config
- self.domain = domain
- self.apidomain = None
- self.cacert = eipspecs.provider_ca_path(domain)
-
- self.defaultprovider = providers.LeapProviderDefinition(domain=domain)
- self.defaultprovider.load()
- self.eipconfig = eipconfig.EIPConfig(domain=domain)
- self.set_api_domain()
- self.eipserviceconfig = eipconfig.EIPServiceConfig(domain=domain)
- self.eipserviceconfig.load()
-
- def run_all(self, checker=None, skip_download=False):
- """
- runs all checks in a row.
- will raise if some error is encountered.
- catching those exceptions is not
- our responsibility at this moment
- """
- if not checker:
- checker = self
-
- # let's call all tests
- # needed for a sane eip session.
-
- # TODO: get rid of check_default.
- # check_complete should
- # be enough, but it's here to make early tests easier.
- checker.check_default_eipconfig()
-
- checker.check_is_there_default_provider()
- checker.fetch_definition(skip_download=skip_download)
- checker.fetch_eip_service_config(skip_download=skip_download)
- checker.check_complete_eip_config()
- #checker.ping_gateway()
-
- # public checks
-
- def check_default_eipconfig(self):
- """
- checks if default eipconfig exists,
- and dumps a default file if not
- """
- # XXX ONLY a transient check
- # because some old function still checks
- # for eip config at the beginning.
-
- # it *really* does not make sense to
- # dump it right now, we can get an in-memory
- # config object and dump it to disk in a
- # later moment
- logger.debug('checking default eip config')
- if not self._is_there_default_eipconfig():
- self._dump_default_eipconfig()
-
- def check_is_there_default_provider(self, config=None):
- """
- raises EIPMissingDefaultProvider if no
- default provider found on eip config.
- This is caught by the ui, which runs FirstRunWizard (MVS+)
- """
- if config is None:
- config = self.eipconfig.config
- logger.debug('checking default provider')
- provider = config.get('provider', None)
- if provider is None:
- raise eipexceptions.EIPMissingDefaultProvider
- # XXX raise also if malformed ProviderDefinition?
- return True
-
- def fetch_definition(self, skip_download=False,
- force_download=False,
- config=None, uri=None,
- domain=None):
- """
- fetches a definition file from server
- """
- # TODO:
- # - Implement diff
- # - overwrite only if different.
- # (attend to serial field different, for instance)
-
- logger.debug('fetching definition')
-
- if skip_download:
- logger.debug('(fetching def skipped)')
- return True
- if config is None:
- config = self.defaultprovider.config
- if uri is None:
- if not domain:
- domain = config.get('provider', None)
- uri = self._get_provider_definition_uri(domain=domain)
-
- if sys.platform == "darwin":
- verify = get_mac_cabundle()
- else:
- verify = True
-
- self.defaultprovider.load(
- from_uri=uri,
- fetcher=self.fetcher,
- verify=verify)
- self.defaultprovider.save()
-
- def fetch_eip_service_config(self, skip_download=False,
- force_download=False,
- config=None, uri=None, # domain=None,
- autocacert=True, verify=True):
- if skip_download:
- return True
- if config is None:
- self.eipserviceconfig.load()
- config = self.eipserviceconfig.config
- if uri is None:
- #XXX
- #if not domain:
- #domain = self.domain or config.get('provider', None)
- uri = self._get_eip_service_uri(
- domain=self.apidomain)
-
- if autocacert and self.cacert is not None:
- verify = self.cacert
-
- self.eipserviceconfig.load(
- from_uri=uri,
- fetcher=self.fetcher,
- force_download=force_download,
- verify=verify)
- self.eipserviceconfig.save()
-
- def check_complete_eip_config(self, config=None):
- # TODO check for gateway
- if config is None:
- config = self.eipconfig.config
- try:
- assert 'provider' in config
- assert config['provider'] is not None
- # XXX assert there is gateway !!
- except AssertionError:
- raise eipexceptions.EIPConfigurationError
-
- # XXX TODO:
- # We should WRITE eip config if missing or
- # incomplete at this point
- #self.eipconfig.save()
-
- #
- # private helpers
- #
-
- def _is_there_default_eipconfig(self):
- return self.eipconfig.exists()
-
- def _dump_default_eipconfig(self):
- self.eipconfig.save(force=True)
-
- def _get_provider_definition_uri(self, domain=None, path=None):
- if domain is None:
- domain = self.domain or baseconstants.DEFAULT_PROVIDER
- if path is None:
- path = baseconstants.DEFINITION_EXPECTED_PATH
- uri = u"https://%s/%s" % (domain, path)
- logger.debug('getting provider definition from %s' % uri)
- return uri
-
- def _get_eip_service_uri(self, domain=None, path=None):
- if domain is None:
- domain = self.domain or baseconstants.DEFAULT_PROVIDER
- if path is None:
- path = eipconstants.EIP_SERVICE_EXPECTED_PATH
- uri = "https://%s/%s" % (domain, path)
- logger.debug('getting eip service file from %s', uri)
- return uri
-
- def set_api_domain(self):
- """sets api domain from defaultprovider config object"""
- api = self.defaultprovider.config.get('api_uri', None)
- # the caller is responsible for having loaded the config
- # object at this point
- if api:
- api_dom = get_https_domain_and_port(api)
- self.apidomain = "%s:%s" % api_dom
-
- def get_api_domain(self):
- """gets api domain"""
- return self.apidomain
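# Editor's note (not part of the original diff): a hypothetical usage sketch for the
# two checker classes above. In the real client they are driven from the EIP
# conductor rather than called directly like this; the domain below is only a
# placeholder.

from leap.eip.checks import ProviderCertChecker, EIPConfigChecker


def run_provider_checks(domain):
    # certificate-level checks: provider CA present, https reachable, client cert
    cert_checker = ProviderCertChecker(domain=domain)
    cert_checker.run_all(skip_download=False, skip_verify=False)

    # config-level checks: eip.json, provider definition, eip-service.json
    config_checker = EIPConfigChecker(domain=domain)
    config_checker.run_all(skip_download=False)


if __name__ == "__main__":
    run_provider_checks("testprovider.example.org")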
diff --git a/src/leap/eip/config.py b/src/leap/eip/config.py
deleted file mode 100644
index 917871da..00000000
--- a/src/leap/eip/config.py
+++ /dev/null
@@ -1,398 +0,0 @@
-import logging
-import os
-import platform
-import re
-import tempfile
-
-from leap import __branding as BRANDING
-from leap import certs
-from leap.util.misc import null_check
-from leap.util.fileutil import (which, mkdir_p, check_and_fix_urw_only)
-
-from leap.base import config as baseconfig
-from leap.baseapp.permcheck import (is_pkexec_in_system,
- is_auth_agent_running)
-from leap.eip import exceptions as eip_exceptions
-from leap.eip import specs as eipspecs
-
-logger = logging.getLogger(name=__name__)
-provider_ca_file = BRANDING.get('provider_ca_file', None)
-
-_platform = platform.system()
-
-
-class EIPConfig(baseconfig.JSONLeapConfig):
- spec = eipspecs.eipconfig_spec
-
- def _get_slug(self):
- eipjsonpath = baseconfig.get_config_file(
- 'eip.json')
- return eipjsonpath
-
- def _set_slug(self, *args, **kwargs):
- raise AttributeError("you cannot set slug")
-
- slug = property(_get_slug, _set_slug)
-
-
-class EIPServiceConfig(baseconfig.JSONLeapConfig):
- spec = eipspecs.eipservice_config_spec
-
- def _get_slug(self):
- domain = getattr(self, 'domain', None)
- if domain:
- path = baseconfig.get_provider_path(domain)
- else:
- path = baseconfig.get_default_provider_path()
- return baseconfig.get_config_file(
- 'eip-service.json', folder=path)
-
- def _set_slug(self):
- raise AttributeError("you cannot set slug")
-
- slug = property(_get_slug, _set_slug)
-
-
-def get_socket_path():
- socket_path = os.path.join(
- tempfile.mkdtemp(prefix="leap-tmp"),
- 'openvpn.socket')
- #logger.debug('socket path: %s', socket_path)
- return socket_path
-
-
-def get_eip_gateway(eipconfig=None, eipserviceconfig=None):
- """
- return the first host in eip service config
- that matches the name defined in the eip.json config
- file.
- """
- # XXX eventually we should move to a more clever
- # gateway selection. maybe we could return
- # all gateways that match our cluster.
-
- null_check(eipconfig, "eipconfig")
- null_check(eipserviceconfig, "eipserviceconfig")
- PLACEHOLDER = "testprovider.example.org"
-
- conf = eipconfig.config
- eipsconf = eipserviceconfig.config
-
- primary_gateway = conf.get('primary_gateway', None)
- if not primary_gateway:
- return PLACEHOLDER
-
- gateways = eipsconf.get('gateways', None)
- if not gateways:
- logger.error('missing gateways in eip service config')
- return PLACEHOLDER
-
- if len(gateways) > 0:
- for gw in gateways:
- clustername = gw.get('cluster', None)
- if not clustername:
- logger.error('no cluster name')
- return
-
- if clustername == primary_gateway:
- # XXX at some moment, we must
- # make this a more generic function,
- # and return ports, protocols...
- ipaddress = gw.get('ip_address', None)
- if not ipaddress:
- logger.error('no ip_address')
- return
- return ipaddress
- logger.error('could not find primary gateway in provider '
- 'gateway list')
-
-
-def get_cipher_options(eipserviceconfig=None):
- """
- gathers optional cipher options from eip-service config.
- :param eipserviceconfig: EIPServiceConfig instance
- """
- null_check(eipserviceconfig, 'eipserviceconfig')
- eipsconf = eipserviceconfig.get_config()
-
- ALLOWED_KEYS = ("auth", "cipher", "tls-cipher")
- CIPHERS_REGEX = re.compile("[A-Z0-9\-]+")
- opts = []
- if 'openvpn_configuration' in eipsconf:
- config = eipserviceconfig.config.get(
- "openvpn_configuration", {})
- for key, value in config.items():
- if key in ALLOWED_KEYS and value is not None:
- sanitized_val = CIPHERS_REGEX.findall(value)
- if len(sanitized_val) != 0:
- _val = sanitized_val[0]
- opts.append('--%s' % key)
- opts.append('%s' % _val)
- return opts
-
-LINUX_UP_DOWN_SCRIPT = "/etc/leap/resolv-update"
-OPENVPN_DOWN_ROOT = "/usr/lib/openvpn/openvpn-down-root.so"
-
-
-def has_updown_scripts():
- """
- checks the existence of the up/down scripts
- """
- # XXX should check permissions too
- is_file = os.path.isfile(LINUX_UP_DOWN_SCRIPT)
- if not is_file:
- logger.warning(
- "Could not find up/down scripts at %s! "
- "Risk of DNS Leaks!!!")
- return is_file
-
-
-def build_ovpn_options(daemon=False, socket_path=None, **kwargs):
- """
- build a list of options
- to be passed in the
- openvpn invocation
- @rtype: list
- @rparam: options
- """
- # XXX review which of the
- # options we don't need.
-
- # TODO pass also the config file,
- # since we will need to take some
- # things from there if present.
-
- provider = kwargs.pop('provider', None)
- eipconfig = EIPConfig(domain=provider)
- eipconfig.load()
- eipserviceconfig = EIPServiceConfig(domain=provider)
- eipserviceconfig.load()
-
- # get user/group name
- # also from config.
- user = baseconfig.get_username()
- group = baseconfig.get_groupname()
-
- opts = []
-
- opts.append('--client')
-
- opts.append('--dev')
- # XXX same in win?
- opts.append('tun')
- opts.append('--persist-tun')
- opts.append('--persist-key')
-
- verbosity = kwargs.get('ovpn_verbosity', None)
- if verbosity and 1 <= verbosity <= 6:
- opts.append('--verb')
- opts.append("%s" % verbosity)
-
- # remote ##############################
- # (server, port, protocol)
-
- opts.append('--remote')
-
- gw = get_eip_gateway(eipconfig=eipconfig,
- eipserviceconfig=eipserviceconfig)
- logger.debug('setting eip gateway to %s', gw)
- opts.append(str(gw))
-
- # get port/protocol from eipservice too
- opts.append('1194')
- #opts.append('80')
- opts.append('udp')
-
- opts.append('--tls-client')
- opts.append('--remote-cert-tls')
- opts.append('server')
-
- # get ciphers #######################
-
- ciphers = get_cipher_options(
- eipserviceconfig=eipserviceconfig)
- for cipheropt in ciphers:
- opts.append(str(cipheropt))
-
- # set user and group
- opts.append('--user')
- opts.append('%s' % user)
- opts.append('--group')
- opts.append('%s' % group)
-
- opts.append('--management-client-user')
- opts.append('%s' % user)
- opts.append('--management-signal')
-
- # set default options for management
- # interface. unix sockets or telnet interface for win.
- # XXX take them from the config object.
-
- if _platform == "Windows":
- opts.append('--management')
- opts.append('localhost')
- # XXX which is a good choice?
- opts.append('7777')
-
- if _platform in ("Linux", "Darwin"):
- opts.append('--management')
-
- if socket_path is None:
- socket_path = get_socket_path()
- opts.append(socket_path)
- opts.append('unix')
-
- opts.append('--script-security')
- opts.append('2')
-
- if _platform == "Linux":
- if has_updown_scripts():
- opts.append("--up")
- opts.append(LINUX_UP_DOWN_SCRIPT)
- opts.append("--down")
- opts.append(LINUX_UP_DOWN_SCRIPT)
- opts.append("--plugin")
- opts.append(OPENVPN_DOWN_ROOT)
- opts.append("'script_type=down %s'" % LINUX_UP_DOWN_SCRIPT)
-
- # certs
- client_cert_path = eipspecs.client_cert_path(provider)
- ca_cert_path = eipspecs.provider_ca_path(provider)
-
- # XXX FIX paths for MAC
- opts.append('--cert')
- opts.append(client_cert_path)
- opts.append('--key')
- opts.append(client_cert_path)
- opts.append('--ca')
- opts.append(ca_cert_path)
-
- # we cannot run in daemon mode
- # with the current subp setting.
- # see: https://leap.se/code/issues/383
- #if daemon is True:
- #opts.append('--daemon')
-
- logger.debug('vpn options: %s', ' '.join(opts))
- return opts
-
-
-def build_ovpn_command(debug=False, do_pkexec_check=True, vpnbin=None,
- socket_path=None, **kwargs):
- """
- build a string with the
- complete openvpn invocation
-
- @rtype [string, [list of strings]]
- @rparam: a list containing the command string
- and a list of options.
- """
- command = []
- use_pkexec = True
- ovpn = None
-
- # XXX get use_pkexec from config instead.
-
- if _platform == "Linux" and use_pkexec and do_pkexec_check:
-
- # check for both pkexec
- # AND a suitable authentication
- # agent running.
- logger.info('use_pkexec set to True')
-
- if not is_pkexec_in_system():
- logger.error('no pkexec in system')
- raise eip_exceptions.EIPNoPkexecAvailable
-
- if not is_auth_agent_running():
- logger.warning(
- "no polkit auth agent found. "
- "pkexec will use its own text "
- "based authentication agent. "
- "that's probably a bad idea")
- raise eip_exceptions.EIPNoPolkitAuthAgentAvailable
-
- command.append('pkexec')
-
- if vpnbin is None:
- if _platform == "Darwin":
- # XXX Should hardcode our installed path
- # /Applications/LEAPClient.app/Contents/Resources/openvpn.leap
- openvpn_bin = "openvpn.leap"
- else:
- openvpn_bin = "openvpn"
- #XXX hardcode for darwin
- ovpn = which(openvpn_bin)
- else:
- ovpn = vpnbin
- if ovpn:
- vpn_command = ovpn
- else:
- vpn_command = "openvpn"
- command.append(vpn_command)
- daemon_mode = not debug
-
- for opt in build_ovpn_options(daemon=daemon_mode, socket_path=socket_path,
- **kwargs):
- command.append(opt)
-
- # XXX check len and raise proper error
-
- if _platform == "Darwin":
- OSX_ASADMIN = 'do shell script "%s" with administrator privileges'
- # XXX fix workaround for Nones
- _command = [x if x else " " for x in command]
- # XXX debugging!
- # XXX get openvpn log path from debug flags
- _command.append('--log')
- _command.append('/tmp/leap_openvpn.log')
- return ["osascript", ["-e", OSX_ASADMIN % ' '.join(_command)]]
- else:
- return [command[0], command[1:]]
-
-
-def check_vpn_keys(provider=None):
- """
- performs an existence and permission check
- over the openvpn keys file.
- Currently we're expecting a single file
- per provider, containing the CA cert,
- the provider key, and our client certificate
- """
- assert provider is not None
- provider_ca = eipspecs.provider_ca_path(provider)
- client_cert = eipspecs.client_cert_path(provider)
-
- logger.debug('provider ca = %s', provider_ca)
- logger.debug('client cert = %s', client_cert)
-
- # if no keys, raise error.
- # it's caught by the ui, which signals the user.
-
- if not os.path.isfile(provider_ca):
- # not there. let's try to copy.
- folder, filename = os.path.split(provider_ca)
- if not os.path.isdir(folder):
- mkdir_p(folder)
- if provider_ca_file:
- cacert = certs.where(provider_ca_file)
- with open(provider_ca, 'w') as pca:
- with open(cacert, 'r') as cac:
- pca.write(cac.read())
-
- if not os.path.isfile(provider_ca):
- logger.error('key file %s not found. aborting.',
- provider_ca)
- raise eip_exceptions.EIPInitNoKeyFileError
-
- if not os.path.isfile(client_cert):
- logger.error('key file %s not found. aborting.',
- client_cert)
- raise eip_exceptions.EIPInitNoKeyFileError
-
- for keyfile in (provider_ca, client_cert):
- # bad perms? try to fix them
- try:
- check_and_fix_urw_only(keyfile)
- except OSError:
- raise eip_exceptions.EIPInitBadKeyFilePermError
diff --git a/src/leap/eip/constants.py b/src/leap/eip/constants.py
deleted file mode 100644
index 9af5a947..00000000
--- a/src/leap/eip/constants.py
+++ /dev/null
@@ -1,3 +0,0 @@
-# not used anymore with the new JSONConfig.slug
-EIP_CONFIG = "eip.json"
-EIP_SERVICE_EXPECTED_PATH = "1/config/eip-service.json"
diff --git a/src/leap/eip/eipconnection.py b/src/leap/eip/eipconnection.py
deleted file mode 100644
index d012c567..00000000
--- a/src/leap/eip/eipconnection.py
+++ /dev/null
@@ -1,405 +0,0 @@
-"""
-EIP Connection Class
-"""
-from __future__ import (absolute_import,)
-import logging
-import Queue
-import sys
-import time
-
-from dateutil.parser import parse as dateparse
-
-from leap.eip.checks import ProviderCertChecker
-from leap.eip.checks import EIPConfigChecker
-from leap.eip import config as eipconfig
-from leap.eip import exceptions as eip_exceptions
-from leap.eip.openvpnconnection import OpenVPNConnection
-
-logger = logging.getLogger(name=__name__)
-
-
-class StatusMixIn(object):
-
- # a bunch of methods related to querying the connection
- # state/status and displaying useful info.
- # We need to get clear on what is what, and
- # split this into separate functions.
- # Should separate EIPConnectionStatus (self.status)
- # from the OpenVPN state/status command and parsing.
-
- ERR_CONNREFUSED = False
-
- def connection_state(self):
- """
- returns the current connection state
- """
- return self.status.current
-
- def get_icon_name(self):
- """
- get icon name from status object
- """
- return self.status.get_state_icon()
-
- def get_leap_status(self):
- return self.status.get_leap_status()
-
- def poll_connection_state(self):
- """
- """
- try:
- state = self.get_connection_state()
- except eip_exceptions.ConnectionRefusedError:
- # connection refused. might be not ready yet.
- if not self.ERR_CONNREFUSED:
- logger.warning('connection refused')
- self.ERR_CONNREFUSED = True
- return
- if not state:
- #logger.debug('no state')
- return
- (ts, status_step,
- ok, ip, remote) = state
- self.status.set_vpn_state(status_step)
- status_step = self.status.get_readable_status()
- return (ts, status_step, ok, ip, remote)
-
- def make_error(self):
- """
- capture error and wrap it in an
- understandable format
- """
- # mostly a hack to display errors in the debug UI
- # w/o breaking the polling.
- #XXX get helpful error codes
- self.with_errors = True
- now = int(time.time())
- return '%s,LAUNCHER ERROR,ERROR,-,-' % now
-
- def state(self):
- """
- Sends OpenVPN command: state
- """
- state = self._send_command("state")
- if not state:
- return None
- if isinstance(state, str):
- return state
- if isinstance(state, list):
- if len(state) == 1:
- return state[0]
- else:
- return state[-1]
-
- def vpn_status(self):
- """
- OpenVPN command: status
- """
- status = self._send_command("status")
- return status
-
- def vpn_status2(self):
- """
- OpenVPN command: last 2 statuses
- """
- return self._send_command("status 2")
-
- #
- # parse info as the UI expects
- #
-
- def get_status_io(self):
- status = self.vpn_status()
- if isinstance(status, str):
- lines = status.split('\n')
- if isinstance(status, list):
- lines = status
- try:
- (header, when, tun_read, tun_write,
- tcp_read, tcp_write, auth_read) = tuple(lines)
- except ValueError:
- return None
-
- when_ts = dateparse(when.split(',')[1]).timetuple()
- sep = ','
- # XXX clean up this!
- tun_read = tun_read.split(sep)[1]
- tun_write = tun_write.split(sep)[1]
- tcp_read = tcp_read.split(sep)[1]
- tcp_write = tcp_write.split(sep)[1]
- auth_read = auth_read.split(sep)[1]
-
- # XXX this could be a named tuple. prettier.
- return when_ts, (tun_read, tun_write, tcp_read, tcp_write, auth_read)
-
- def get_connection_state(self):
- state = self.state()
- if state is not None:
- ts, status_step, ok, ip, remote = state.split(',')
- ts = time.gmtime(float(ts))
- # XXX this could be a named tuple. prettier.
- return ts, status_step, ok, ip, remote
-
-
-class EIPConnection(OpenVPNConnection, StatusMixIn):
- """
- Aka conductor.
- Manages the execution of the OpenVPN process, auto starts, monitors the
- network connection, handles configuration, fixes leaky hosts, handles
- errors, etc.
- Status updates (connected, bandwidth, etc) are signaled to the GUI.
- """
-
- # XXX change name to EIPConductor ??
-
- def __init__(self,
- provider_cert_checker=ProviderCertChecker,
- config_checker=EIPConfigChecker,
- *args, **kwargs):
- #self.settingsfile = kwargs.get('settingsfile', None)
- #self.logfile = kwargs.get('logfile', None)
- self.provider = kwargs.pop('provider', None)
- self._providercertchecker = provider_cert_checker
- self._configchecker = config_checker
-
- self.error_queue = Queue.Queue()
-
- status_signals = kwargs.pop('status_signals', None)
- self.status = EIPConnectionStatus(callbacks=status_signals)
-
- checker_signals = kwargs.pop('checker_signals', None)
- self.checker_signals = checker_signals
-
- self.init_checkers()
-
- host = eipconfig.get_socket_path()
- kwargs['host'] = host
-
- super(EIPConnection, self).__init__(*args, **kwargs)
-
- def connect(self, **kwargs):
- """
- entry point for connection process
- """
- # in OpenVPNConnection
- self.try_openvpn_connection()
-
- def disconnect(self, shutdown=False):
- """
- disconnects client
- """
- self.terminate_openvpn_connection(shutdown=shutdown)
- self.status.change_to(self.status.DISCONNECTED)
-
- def has_errors(self):
- return True if self.error_queue.qsize() != 0 else False
-
- def init_checkers(self):
- """
- initialize checkers
- """
- self.provider_cert_checker = self._providercertchecker(
- domain=self.provider)
- self.config_checker = self._configchecker(domain=self.provider)
-
- def set_provider_domain(self, domain):
- """
- sets the provider domain.
- used from the first run wizard when we launch the run_checks
- and connect process after having initialized the conductor.
- """
- # This looks convoluted, right?
- # We have to reinstantiate checkers because we're passing
- # the domain param that we did not know at the beginning
- # (only for the firstrunwizard case)
- self.provider = domain
- self.init_checkers()
-
- def run_checks(self, skip_download=False, skip_verify=False):
- """
- run all eip checks previous to attempting a connection
- """
- logger.debug('running conductor checks')
-
- def push_err(exc):
- # keep the original traceback!
- exc_traceback = sys.exc_info()[2]
- self.error_queue.put((exc, exc_traceback))
-
- try:
- # network (1)
- if self.checker_signals:
- for signal in self.checker_signals:
- signal('checking encryption keys')
- self.provider_cert_checker.run_all(skip_verify=skip_verify)
- except Exception as exc:
- push_err(exc)
- try:
- if self.checker_signals:
- for signal in self.checker_signals:
- signal('checking provider config')
- self.config_checker.run_all(skip_download=skip_download)
- except Exception as exc:
- push_err(exc)
- try:
- self.run_openvpn_checks()
- except Exception as exc:
- push_err(exc)
-
-
-class EIPConnectionStatus(object):
- """
- Keep track of client (gui) and openvpn
- states.
-
- These are the OpenVPN states:
- CONNECTING -- OpenVPN's initial state.
- WAIT -- (Client only) Waiting for initial response
- from server.
- AUTH -- (Client only) Authenticating with server.
- GET_CONFIG -- (Client only) Downloading configuration options
- from server.
- ASSIGN_IP -- Assigning IP address to virtual network
- interface.
- ADD_ROUTES -- Adding routes to system.
- CONNECTED -- Initialization Sequence Completed.
- RECONNECTING -- A restart has occurred.
- EXITING -- A graceful exit is in progress.
-
- We add some extra states:
-
- DISCONNECTED -- GUI initial state.
- UNRECOVERABLE -- An unrecoverable error has been raised
- while invoking openvpn service.
- """
- CONNECTING = 1
- WAIT = 2
- AUTH = 3
- GET_CONFIG = 4
- ASSIGN_IP = 5
- ADD_ROUTES = 6
- CONNECTED = 7
- RECONNECTING = 8
- EXITING = 9
-
- # gui specific states:
- UNRECOVERABLE = 11
- DISCONNECTED = 0
-
- def __init__(self, callbacks=None):
- """
- EIPConnectionStatus is initialized with a tuple
- of signals to be triggered.
- :param callbacks: a tuple of (callable) observers
- :type callbacks: tuple
- """
- self.current = self.DISCONNECTED
- self.previous = None
- # (callbacks to connect to signals in Qt-land)
- self.callbacks = callbacks
-
- def get_readable_status(self):
- # XXX DRY status / labels a little bit.
- # think we'll want to i18n this.
- human_status = {
- 0: 'disconnected',
- 1: 'connecting',
- 2: 'waiting',
- 3: 'authenticating',
- 4: 'getting config',
- 5: 'assigning ip',
- 6: 'adding routes',
- 7: 'connected',
- 8: 'reconnecting',
- 9: 'exiting',
- 11: 'unrecoverable error',
- }
- return human_status[self.current]
-
- def get_leap_status(self):
- # XXX improve nomenclature
- leap_status = {
- 0: 'disconnected',
- 1: 'connecting to gateway',
- 2: 'connecting to gateway',
- 3: 'authenticating',
- 4: 'establishing network encryption',
- 5: 'establishing network encryption',
- 6: 'establishing network encryption',
- 7: 'connected',
- 8: 'reconnecting',
- 9: 'exiting',
- 11: 'unrecoverable error',
- }
- return leap_status[self.current]
-
- def get_state_icon(self):
- """
- returns the high level icon
- for each fine-grain openvpn state
- """
- connecting = (self.CONNECTING,
- self.WAIT,
- self.AUTH,
- self.GET_CONFIG,
- self.ASSIGN_IP,
- self.ADD_ROUTES)
- connected = (self.CONNECTED,)
- disconnected = (self.DISCONNECTED,
- self.UNRECOVERABLE)
-
- # this can be made smarter,
- # but it's likely it'll change,
- # so +readability.
-
- if self.current in connecting:
- return "connecting"
- if self.current in connected:
- return "connected"
- if self.current in disconnected:
- return "disconnected"
-
- def set_vpn_state(self, status):
- """
- accepts a state string from the management
- interface, and sets the internal state.
- :param status: openvpn STATE (uppercase).
- :type status: str
- """
- if hasattr(self, status):
- self.change_to(getattr(self, status))
-
- def set_current(self, to):
- """
- setter for the 'current' property
- :param to: destination state
- :type to: int
- """
- self.current = to
-
- def change_to(self, to):
- """
- :param to: destination state
- :type to: int
- """
- if to == self.current:
- return
- changed = False
- from_ = self.current
- self.current = to
-
- # We can add transition restrictions
- # here to ensure no transitions are
- # allowed outside the fsm.
-
- self.set_current(to)
- changed = True
-
- #trigger signals (as callbacks)
- #print('current state: %s' % self.current)
- if changed:
- self.previous = from_
- if self.callbacks:
- for cb in self.callbacks:
- if callable(cb):
- cb(self)
diff --git a/src/leap/eip/exceptions.py b/src/leap/eip/exceptions.py
deleted file mode 100644
index b7d398c3..00000000
--- a/src/leap/eip/exceptions.py
+++ /dev/null
@@ -1,175 +0,0 @@
-"""
-Generic error hierarchy
-Leap/EIP exceptions used for exception handling,
-logging, and notifying the user of errors
-during leap operation.
-
-Exception hierarchy
--------------------
-All EIP Errors must inherit from EIPClientError (note: move that to
-a more generic LEAPClientBaseError).
-
-Exception attributes and their meaning/uses
--------------------------------------------
-
-* critical: if True, will abort execution prematurely,
- after attempting any cleaning
- action.
-
-* failfirst: breaks any error_check loop that is examining
- the error queue.
-
-* message: the message that will be used in the __repr__ of the exception.
-
-* usermessage: the message that will be passed to user in ErrorDialogs
- in Qt-land.
-
-TODO:
-
-* EIPClientError:
- Should inherit from LeapException
-
-* gettext / i18n for user messages.
-
-"""
-from leap.base.exceptions import LeapException
-from leap.util.translations import translate
-
-
-# This should inherit from LeapException
-class EIPClientError(Exception):
- """
- base EIPClient exception
- """
- critical = False
- failfirst = False
- warning = False
-
-
-class CriticalError(EIPClientError):
- """
- we cannot do anything about it, sorry
- """
- critical = True
- failfirst = True
-
-
-class Warning(EIPClientError):
- """
- just that, warnings
- """
- warning = True
-
-
-class EIPNoPolkitAuthAgentAvailable(CriticalError):
- message = "No polkit authentication agent could be found"
- usermessage = translate(
- "EIPErrors",
- "We could not find any authentication "
- "agent in your system.<br/>"
- "Make sure you have "
- "<b>polkit-gnome-authentication-agent-1</b> "
- "running and try again.")
-
-
-class EIPNoPkexecAvailable(Warning):
- message = "No pkexec binary found"
- usermessage = translate(
- "EIPErrors",
- "We could not find <b>pkexec</b> in your "
- "system.<br/> Do you want to try "
- "<b>setuid workaround</b>? "
- "(<i>DOES NOTHING YET</i>)")
- failfirst = True
-
-
-class EIPNoCommandError(EIPClientError):
- message = "no suitable openvpn command found"
- usermessage = translate(
- "EIPErrors",
- "No suitable openvpn command found. "
- "<br/>(Might be a permissions problem)")
-
-
-class EIPBadCertError(Warning):
- # XXX this should be critical and fail close
- message = "cert verification failed"
- usermessage = translate(
- "EIPErrors",
- "there is a problem with provider certificate")
-
-
-class LeapBadConfigFetchedError(Warning):
- message = "provider sent a malformed json file"
- usermessage = translate(
- "EIPErrors",
- "an error occurred during configuratio of leap services")
-
-
-class OpenVPNAlreadyRunning(CriticalError):
- message = "Another OpenVPN Process is already running."
- usermessage = translate(
- "EIPErrors",
- "Another OpenVPN Process has been detected. "
- "Please close it before starting leap-client")
-
-
-class HttpsNotSupported(LeapException):
- message = "connection refused while accessing via https"
- usermessage = translate(
- "EIPErrors",
- "Server does not allow secure connections")
-
-
-class HttpsBadCertError(LeapException):
- message = "verification error on cert"
- usermessage = translate(
- "EIPErrors",
- "Server certificate could not be verified")
-
-#
-# errors still needing some love
-#
-
-
-class EIPInitNoKeyFileError(CriticalError):
- message = "No vpn keys found in the expected path"
- usermessage = translate(
- "EIPErrors",
- "We could not find your eip certs in the expected path")
-
-
-class EIPInitBadKeyFilePermError(Warning):
- # I don't know if we should be telling the user or not;
- # we try to fix permissions and should only re-raise
- # if the permission check failed.
- pass
-
-
-class EIPInitNoProviderError(EIPClientError):
- pass
-
-
-class EIPInitBadProviderError(EIPClientError):
- pass
-
-
-class EIPConfigurationError(EIPClientError):
- pass
-
-#
-# Errors that we probably don't need anymore.
-# Chase them down and check.
-#
-
-
-class MissingSocketError(Exception):
- pass
-
-
-class ConnectionRefusedError(Exception):
- pass
-
-
-class EIPMissingDefaultProvider(Exception):
- pass
diff --git a/src/leap/eip/openvpnconnection.py b/src/leap/eip/openvpnconnection.py
deleted file mode 100644
index 455735c8..00000000
--- a/src/leap/eip/openvpnconnection.py
+++ /dev/null
@@ -1,410 +0,0 @@
-"""
-OpenVPN Connection
-"""
-from __future__ import (print_function)
-from functools import partial
-import logging
-import os
-import psutil
-import shutil
-import select
-import socket
-from time import sleep
-
-logger = logging.getLogger(name=__name__)
-
-from leap.base.connection import Connection
-from leap.base.constants import OPENVPN_BIN
-from leap.util.coroutines import spawn_and_watch_process
-from leap.util.misc import get_openvpn_pids
-
-from leap.eip.udstelnet import UDSTelnet
-from leap.eip import config as eip_config
-from leap.eip import exceptions as eip_exceptions
-
-
-class OpenVPNManagement(object):
-
- # TODO explain a little bit how management interface works
- # and our telnet interface with support for unix sockets.
-
- """
- for more information, read openvpn management notes.
- zcat `dpkg -L openvpn | grep management`
- """
-
- def _connect_to_management(self):
- """
- Connect to openvpn management interface
- """
- if hasattr(self, 'tn'):
- self._close_management_socket()
- self.tn = UDSTelnet(self.host, self.port)
-
- # XXX make password optional
- # specially for win. we should generate
- # the pass on the fly when invoking manager
- # from conductor
-
- #self.tn.read_until('ENTER PASSWORD:', 2)
- #self.tn.write(self.password + '\n')
- #self.tn.read_until('SUCCESS:', 2)
- if self.tn:
- self._seek_to_eof()
- return True
-
- def _close_management_socket(self, announce=True):
- """
- Close connection to openvpn management interface
- """
- logger.debug('closing socket')
- if announce:
- self.tn.write("quit\n")
- self.tn.read_all()
- self.tn.get_socket().close()
- del self.tn
-
- def _seek_to_eof(self):
- """
- Read as much as available. Position seek pointer to end of stream
- """
- try:
- b = self.tn.read_eager()
- except EOFError:
- logger.debug("Could not read from socket. Assuming it died.")
- return
- while b:
- try:
- b = self.tn.read_eager()
- except EOFError:
- logger.debug("Could not read from socket. Assuming it died.")
-
- def _send_command(self, cmd):
- """
- Send a command to openvpn and return response as list
- """
- if not self.connected():
- try:
- self._connect_to_management()
- except eip_exceptions.MissingSocketError:
- #logger.warning('missing management socket')
- return []
- try:
- if hasattr(self, 'tn'):
- self.tn.write(cmd + "\n")
- except socket.error:
- logger.error('socket error')
- self._close_management_socket(announce=False)
- return []
- try:
- buf = self.tn.read_until(b"END", 2)
- self._seek_to_eof()
- blist = buf.split('\r\n')
- if blist[-1].startswith('END'):
- del blist[-1]
- return blist
- else:
- return []
- except socket.error as exc:
- logger.debug('socket error: %s' % exc.message)
- except select.error as exc:
- logger.debug('select error: %s' % exc.message)
-
- def _send_short_command(self, cmd):
- """
- parse output from commands that are
- delimited by "success" instead
- """
- if not self.connected():
- self.connect()
- self.tn.write(cmd + "\n")
- # XXX not working?
- buf = self.tn.read_until(b"SUCCESS", 2)
- self._seek_to_eof()
- blist = buf.split('\r\n')
- return blist
-
- #
- # random maybe useful vpn commands
- #
-
- def pid(self):
- #XXX broken
- return self._send_short_command("pid")
-
-
-class OpenVPNConnection(Connection, OpenVPNManagement):
- """
- All related to invocation
- of the openvpn binary.
- It's extended by EIPConnection.
- """
-
- # XXX Inheriting from Connection was an early design idea
- # but currently that's an empty class.
- # We can get rid of that if we don't use it for sharing
- # state with other leap modules.
-
- def __init__(self,
- watcher_cb=None,
- debug=False,
- host=None,
- port="unix",
- password=None,
- *args, **kwargs):
- """
- :param watcher_cb: callback to be \
-called for each line in watched stdout
- :param signal_map: dictionary of signal names and callables \
-to be triggered for each one of them.
- :type watcher_cb: function
- :type signal_map: dict
- """
- #XXX FIXME
- #change watcher_cb to line_observer
- # XXX if not host: raise ImproperlyConfigured
-
- logger.debug('init openvpn connection')
- self.debug = debug
- self.ovpn_verbosity = kwargs.get('ovpn_verbosity', None)
-
- self.watcher_cb = watcher_cb
- #self.signal_maps = signal_maps
-
- self.subp = None
- self.watcher = None
-
- self.server = None
- self.port = None
- self.proto = None
-
- self.command = None
- self.args = None
-
- # XXX get autostart from config
- self.autostart = True
-
- # management interface init
- self.host = host
- if isinstance(port, str) and port.isdigit():
- port = int(port)
- elif port == "unix":
- port = "unix"
- else:
- port = None
- self.port = port
- self.password = password
-
- def run_openvpn_checks(self):
- """
- runs the checks needed before launching the
- openvpn subprocess. will raise if errors are found.
- """
- logger.debug('running openvpn checks')
- # XXX I think that "check_if_running" should be called
- # from try openvpn connection instead. -- kali.
- # let's prepare tests for that before changing it...
- self._check_if_running_instance()
- self._set_ovpn_command()
- self._check_vpn_keys()
-
- def try_openvpn_connection(self):
- """
- attempts to connect
- """
- # XXX should make public method
- if self.command is None:
- raise eip_exceptions.EIPNoCommandError
- if self.subp is not None:
- logger.debug('cowardly refusing to launch subprocess again')
- # XXX this is not returning ???!!
- # FIXME -- so it's calling it all the same!!
-
- self._launch_openvpn()
-
- def connected(self):
- """
- Returns True if connected
- rtype: bool
- """
- # XXX make a property
- return hasattr(self, 'tn')
-
- def terminate_openvpn_connection(self, shutdown=False):
- """
- terminates openvpn child subprocess
- """
- if self.subp:
- try:
- self._stop_openvpn()
- except eip_exceptions.ConnectionRefusedError:
- logger.warning(
- 'unable to send sigterm signal to openvpn: '
- 'connection refused.')
-
- # XXX kali --
- # XXX review-me
- # I think this will block if child process
- # does not return.
- # Maybe we can .poll() for a given
- # interval and exit in any case.
-
- RETCODE = self.subp.wait()
- if RETCODE:
- logger.error(
- 'cannot terminate subprocess! Retcode %s'
- '(We might have left openvpn running)' % RETCODE)
-
- if shutdown:
- self._cleanup_tempfiles()
-
- def _cleanup_tempfiles(self):
- """
- remove all temporary files
- we might have left behind
- """
- # if self.port is 'unix', we have
- # created a temporary socket path that, under
- # normal circumstances, we should be able to
- # delete
-
- if self.port == "unix":
- logger.debug('cleaning socket file temp folder')
-
- tempfolder = os.path.split(self.host)[0]
- if os.path.isdir(tempfolder):
- try:
- shutil.rmtree(tempfolder)
- except OSError:
- logger.error('could not delete tmpfolder %s' % tempfolder)
-
- # checks
-
- def _check_if_running_instance(self):
- """
- check if openvpn is already running
- """
- openvpn_pids = get_openvpn_pids()
- if openvpn_pids:
- logger.debug('an openvpn instance is already running.')
- logger.debug('attempting to stop openvpn instance.')
- if not self._stop_openvpn():
- raise eip_exceptions.OpenVPNAlreadyRunning
- return
- else:
- logger.debug('no openvpn instance found.')
-
- def _set_ovpn_command(self):
- try:
- command, args = eip_config.build_ovpn_command(
- provider=self.provider,
- debug=self.debug,
- socket_path=self.host,
- ovpn_verbosity=self.ovpn_verbosity)
- except eip_exceptions.EIPNoPolkitAuthAgentAvailable:
- command = args = None
- raise
- except eip_exceptions.EIPNoPkexecAvailable:
- command = args = None
- raise
-
- # XXX if not command, signal error.
- self.command = command
- self.args = args
-
- def _check_vpn_keys(self):
- """
- checks for correct permissions on vpn keys
- """
- try:
- eip_config.check_vpn_keys(provider=self.provider)
- except eip_exceptions.EIPInitBadKeyFilePermError:
- logger.error('Bad VPN Keys permission!')
- # do nothing now
- # and raise the rest ...
-
- # starting and stopping openvpn subprocess
-
- def _launch_openvpn(self):
- """
- invocation of openvpn binaries in a subprocess.
- """
- #XXX TODO:
- #deprecate watcher_cb,
- #use _only_ signal_maps instead
-
- #logger.debug('_launch_openvpn called')
- if self.watcher_cb is not None:
- linewrite_callback = self.watcher_cb
- else:
- #XXX get logger instead
- linewrite_callback = lambda line: logger.debug(
- 'watcher: %s' % line)
-
- # the partial is not
- # being applied now because we're not observing the process
- # stdout like we did in the early stages. but I leave it
- # here since it will be handy for observing patterns in the
- # thru-the-manager updates (with regex)
- observers = (linewrite_callback,
- partial(lambda con_status,
- line: linewrite_callback, self.status))
- subp, watcher = spawn_and_watch_process(
- self.command,
- self.args,
- observers=observers)
- self.subp = subp
- self.watcher = watcher
-
- def _stop_openvpn(self):
- """
- stop openvpn process
- by sending SIGTERM to the management
- interface
- """
- # XXX method a bit too long, split
- logger.debug("atempting to terminate openvpn process...")
- if self.connected():
- try:
- self._send_command("signal SIGTERM\n")
- sleep(1)
- if not self.subp: # XXX ???
- return True
- except socket.error:
- logger.warning('management socket died')
- return
-
- #shutting down openvpn failed
- #try patching in the old openvpn host and trying again
- # XXX could be more than one!
- process = self._get_openvpn_process()
- if process:
- logger.debug('process: %s' % process.name)
- cmdline = process.cmdline
-
- manag_flag = "--management"
- if isinstance(cmdline, list) and manag_flag in cmdline:
- _index = cmdline.index(manag_flag)
- self.host = cmdline[_index + 1]
- self._send_command("signal SIGTERM\n")
-
- #make sure the process was terminated
- process = self._get_openvpn_process()
- if not process:
- logger.debug("Existing OpenVPN Process Terminated")
- return True
- else:
- logger.error("Unable to terminate existing OpenVPN Process.")
- return False
-
- return True
-
- def _get_openvpn_process(self):
- for process in psutil.process_iter():
- if OPENVPN_BIN in process.name:
- return process
- return None
-
- def get_log(self, lines=1):
- log = self._send_command("log %s" % lines)
- return log
diff --git a/src/leap/eip/specs.py b/src/leap/eip/specs.py
deleted file mode 100644
index c41fd29b..00000000
--- a/src/leap/eip/specs.py
+++ /dev/null
@@ -1,136 +0,0 @@
-from __future__ import (unicode_literals)
-import os
-
-from leap import __branding
-from leap.base import config as baseconfig
-
-# XXX move provider stuff to base config
-
-PROVIDER_CA_CERT = __branding.get(
- 'provider_ca_file',
- 'cacert.pem')
-
-provider_ca_path = lambda domain: str(os.path.join(
- #baseconfig.get_default_provider_path(),
- baseconfig.get_provider_path(domain),
- 'keys', 'ca',
- 'cacert.pem'
-)) if domain else None
-
-default_provider_ca_path = lambda: str(os.path.join(
- baseconfig.get_default_provider_path(),
- 'keys', 'ca',
- PROVIDER_CA_CERT
-))
-
-PROVIDER_DOMAIN = __branding.get('provider_domain', 'testprovider.example.org')
-
-
-client_cert_path = lambda domain: unicode(os.path.join(
- baseconfig.get_provider_path(domain),
- 'keys', 'client',
- 'openvpn.pem'
-)) if domain else None
-
-default_client_cert_path = lambda: unicode(os.path.join(
- baseconfig.get_default_provider_path(),
- 'keys', 'client',
- 'openvpn.pem'
-))
-
-eipconfig_spec = {
- 'description': 'sample eipconfig',
- 'type': 'object',
- 'properties': {
- 'provider': {
- 'type': unicode,
- 'default': u"%s" % PROVIDER_DOMAIN,
- 'required': True,
- },
- 'transport': {
- 'type': unicode,
- 'default': u"openvpn",
- },
- 'openvpn_protocol': {
- 'type': unicode,
- 'default': u"tcp"
- },
- 'openvpn_port': {
- 'type': int,
- 'default': 80
- },
- 'openvpn_ca_certificate': {
- 'type': unicode, # path
- 'default': default_provider_ca_path
- },
- 'openvpn_client_certificate': {
- 'type': unicode, # path
- 'default': default_client_cert_path
- },
- 'connect_on_login': {
- 'type': bool,
- 'default': True
- },
- 'block_cleartext_traffic': {
- 'type': bool,
- 'default': True
- },
- 'primary_gateway': {
- 'type': unicode,
- 'default': u"location_unknown",
- #'required': True
- },
- 'secondary_gateway': {
- 'type': unicode,
- 'default': u"location_unknown2"
- },
- 'management_password': {
- 'type': unicode
- }
- }
-}
-
-eipservice_config_spec = {
- 'description': 'sample eip service config',
- 'type': 'object',
- 'properties': {
- 'serial': {
- 'type': int,
- 'required': True,
- 'default': 1
- },
- 'version': {
- 'type': int,
- 'required': True,
- 'default': 1
- },
- 'clusters': {
- 'type': list,
- 'default': [
- {"label": {
- "en": "Location Unknown"},
- "name": "location_unknown"}]
- },
- 'gateways': {
- 'type': list,
- 'default': [
- {"capabilities": {
- "adblock": True,
- "filter_dns": True,
- "ports": ["80", "53", "443", "1194"],
- "protocols": ["udp", "tcp"],
- "transport": ["openvpn"],
- "user_ips": False},
- "cluster": "location_unknown",
- "host": "location.example.org",
- "ip_address": "127.0.0.1"}]
- },
- 'openvpn_configuration': {
- 'type': dict,
- 'default': {
- "auth": None,
- "cipher": None,
- "tls-cipher": None}
- }
- }
-}
diff --git a/src/leap/eip/tests/__init__.py b/src/leap/eip/tests/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/src/leap/eip/tests/__init__.py
+++ /dev/null
diff --git a/src/leap/eip/tests/data.py b/src/leap/eip/tests/data.py
deleted file mode 100644
index a7fe1853..00000000
--- a/src/leap/eip/tests/data.py
+++ /dev/null
@@ -1,51 +0,0 @@
-from __future__ import unicode_literals
-import os
-
-#from leap import __branding
-
-# sample data used in tests
-
-#PROVIDER = __branding.get('provider_domain')
-PROVIDER = "testprovider.example.org"
-
-EIP_SAMPLE_CONFIG = {
- "provider": "%s" % PROVIDER,
- "transport": "openvpn",
- "openvpn_protocol": "tcp",
- "openvpn_port": 80,
- "openvpn_ca_certificate": os.path.expanduser(
- "~/.config/leap/providers/"
- "%s/"
- "keys/ca/cacert.pem" % PROVIDER),
- "openvpn_client_certificate": os.path.expanduser(
- "~/.config/leap/providers/"
- "%s/"
- "keys/client/openvpn.pem" % PROVIDER),
- "connect_on_login": True,
- "block_cleartext_traffic": True,
- "primary_gateway": "location_unknown",
- "secondary_gateway": "location_unknown2",
- #"management_password": "oph7Que1othahwiech6J"
-}
-
-EIP_SAMPLE_SERVICE = {
- "serial": 1,
- "version": 1,
- "clusters": [
- {"label": {
- "en": "Location Unknown"},
- "name": "location_unknown"}
- ],
- "gateways": [
- {"capabilities": {
- "adblock": True,
- "filter_dns": True,
- "ports": ["80", "53", "443", "1194"],
- "protocols": ["udp", "tcp"],
- "transport": ["openvpn"],
- "user_ips": False},
- "cluster": "location_unknown",
- "host": "location.example.org",
- "ip_address": "192.0.43.10"}
- ]
-}
diff --git a/src/leap/eip/tests/test_checks.py b/src/leap/eip/tests/test_checks.py
deleted file mode 100644
index ab11037a..00000000
--- a/src/leap/eip/tests/test_checks.py
+++ /dev/null
@@ -1,373 +0,0 @@
-from BaseHTTPServer import BaseHTTPRequestHandler
-import copy
-import json
-try:
- import unittest2 as unittest
-except ImportError:
- import unittest
-import os
-import time
-import urlparse
-
-from mock import (patch, Mock)
-
-import jsonschema
-#import ping
-import requests
-
-from leap.base import config as baseconfig
-from leap.base.constants import (DEFAULT_PROVIDER_DEFINITION,
- DEFINITION_EXPECTED_PATH)
-from leap.eip import checks as eipchecks
-from leap.eip import specs as eipspecs
-from leap.eip import exceptions as eipexceptions
-from leap.eip.tests import data as testdata
-from leap.testing.basetest import BaseLeapTest
-from leap.testing.https_server import BaseHTTPSServerTestCase
-from leap.testing.https_server import where as where_cert
-from leap.util.fileutil import mkdir_f
-
-
-class NoLogRequestHandler:
- def log_message(self, *args):
- # don't write log msg to stderr
- pass
-
- def read(self, n=None):
- return ''
-
-
-class EIPCheckTest(BaseLeapTest):
-
- __name__ = "eip_check_tests"
- provider = "testprovider.example.org"
- maxDiff = None
-
- def setUp(self):
- pass
-
- def tearDown(self):
- pass
-
- # test methods are there, and can be called from run_all
-
- def test_checker_should_implement_check_methods(self):
- checker = eipchecks.EIPConfigChecker(domain=self.provider)
-
- self.assertTrue(hasattr(checker, "check_default_eipconfig"),
- "missing meth")
- self.assertTrue(hasattr(checker, "check_is_there_default_provider"),
- "missing meth")
- self.assertTrue(hasattr(checker, "fetch_definition"), "missing meth")
- self.assertTrue(hasattr(checker, "fetch_eip_service_config"),
- "missing meth")
- self.assertTrue(hasattr(checker, "check_complete_eip_config"),
- "missing meth")
-
- def test_checker_should_actually_call_all_tests(self):
- checker = eipchecks.EIPConfigChecker(domain=self.provider)
-
- mc = Mock()
- checker.run_all(checker=mc)
- self.assertTrue(mc.check_default_eipconfig.called, "not called")
- self.assertTrue(mc.check_is_there_default_provider.called,
- "not called")
- self.assertTrue(mc.fetch_definition.called,
- "not called")
- self.assertTrue(mc.fetch_eip_service_config.called,
- "not called")
- self.assertTrue(mc.check_complete_eip_config.called,
- "not called")
-
- # test individual check methods
-
- def test_check_default_eipconfig(self):
- checker = eipchecks.EIPConfigChecker(domain=self.provider)
- # no eip config (empty home)
- eipconfig_path = checker.eipconfig.filename
- self.assertFalse(os.path.isfile(eipconfig_path))
- checker.check_default_eipconfig()
- # we've written one, so it should be there.
- self.assertTrue(os.path.isfile(eipconfig_path))
- with open(eipconfig_path, 'rb') as fp:
- deserialized = json.load(fp)
-
- # force re-evaluation of the paths
- # small workaround for evaluating home dirs correctly
- EIP_SAMPLE_CONFIG = copy.copy(testdata.EIP_SAMPLE_CONFIG)
- EIP_SAMPLE_CONFIG['openvpn_client_certificate'] = \
- eipspecs.client_cert_path(self.provider)
- EIP_SAMPLE_CONFIG['openvpn_ca_certificate'] = \
- eipspecs.provider_ca_path(self.provider)
- self.assertEqual(deserialized, EIP_SAMPLE_CONFIG)
-
- # TODO: should ALSO run validation methods.
-
- def test_check_is_there_default_provider(self):
- checker = eipchecks.EIPConfigChecker(domain=self.provider)
- # we do dump a sample eip config, but lacking a
- # default provider entry.
- # This error will possibly be caught in a different
- # place, when JSONConfig does validation of required fields.
-
- # passing direct config
- with self.assertRaises(eipexceptions.EIPMissingDefaultProvider):
- checker.check_is_there_default_provider(config={})
-
- # ok. now, messing with real files...
- # blank out default_provider
- sampleconfig = copy.copy(testdata.EIP_SAMPLE_CONFIG)
- sampleconfig['provider'] = None
- eipcfg_path = checker.eipconfig.filename
- mkdir_f(eipcfg_path)
- with open(eipcfg_path, 'w') as fp:
- json.dump(sampleconfig, fp)
- #with self.assertRaises(eipexceptions.EIPMissingDefaultProvider):
- # XXX we should catch this as one of our errors, but do not
- # see how to do it quickly.
- with self.assertRaises(jsonschema.ValidationError):
- #import ipdb;ipdb.set_trace()
- checker.eipconfig.load(fromfile=eipcfg_path)
- checker.check_is_there_default_provider()
-
- sampleconfig = testdata.EIP_SAMPLE_CONFIG
- #eipcfg_path = checker._get_default_eipconfig_path()
- with open(eipcfg_path, 'w') as fp:
- json.dump(sampleconfig, fp)
- checker.eipconfig.load()
- self.assertTrue(checker.check_is_there_default_provider())
-
- def test_fetch_definition(self):
- with patch.object(requests, "get") as mocked_get:
- mocked_get.return_value.status_code = 200
- mocked_get.return_value.headers = {
- 'last-modified': "Wed Dec 12 12:12:12 GMT 2012"}
- mocked_get.return_value.json = DEFAULT_PROVIDER_DEFINITION
- checker = eipchecks.EIPConfigChecker(fetcher=requests)
- sampleconfig = testdata.EIP_SAMPLE_CONFIG
- checker.fetch_definition(config=sampleconfig)
-
- fn = os.path.join(baseconfig.get_default_provider_path(),
- DEFINITION_EXPECTED_PATH)
- with open(fn, 'r') as fp:
- deserialized = json.load(fp)
- self.assertEqual(DEFAULT_PROVIDER_DEFINITION, deserialized)
-
- # XXX TODO check for ConnectionError, HTTPError, InvalidUrl
- # (and proper EIPExceptions are raised).
- # Look at base.test_config.
-
- def test_fetch_eip_service_config(self):
- with patch.object(requests, "get") as mocked_get:
- mocked_get.return_value.status_code = 200
- mocked_get.return_value.headers = {
- 'last-modified': "Wed Dec 12 12:12:12 GMT 2012"}
- mocked_get.return_value.json = testdata.EIP_SAMPLE_SERVICE
- checker = eipchecks.EIPConfigChecker(fetcher=requests)
- sampleconfig = testdata.EIP_SAMPLE_CONFIG
- checker.fetch_eip_service_config(config=sampleconfig)
-
- def test_check_complete_eip_config(self):
- checker = eipchecks.EIPConfigChecker()
- with self.assertRaises(eipexceptions.EIPConfigurationError):
- sampleconfig = copy.copy(testdata.EIP_SAMPLE_CONFIG)
- sampleconfig['provider'] = None
- checker.check_complete_eip_config(config=sampleconfig)
- with self.assertRaises(eipexceptions.EIPConfigurationError):
- sampleconfig = copy.copy(testdata.EIP_SAMPLE_CONFIG)
- del sampleconfig['provider']
- checker.check_complete_eip_config(config=sampleconfig)
-
- # normal case
- sampleconfig = copy.copy(testdata.EIP_SAMPLE_CONFIG)
- checker.check_complete_eip_config(config=sampleconfig)
-
-
-class ProviderCertCheckerTest(BaseLeapTest):
-
- __name__ = "provider_cert_checker_tests"
- provider = "testprovider.example.org"
-
- def setUp(self):
- pass
-
- def tearDown(self):
- pass
-
- # test methods are there, and can be called from run_all
-
- def test_checker_should_implement_check_methods(self):
- checker = eipchecks.ProviderCertChecker()
-
- # For MVS+
- self.assertTrue(hasattr(checker, "download_ca_cert"),
- "missing meth")
- self.assertTrue(hasattr(checker, "download_ca_signature"),
- "missing meth")
- self.assertTrue(hasattr(checker, "get_ca_signatures"), "missing meth")
- self.assertTrue(hasattr(checker, "is_there_trust_path"),
- "missing meth")
-
- # For MVS
- self.assertTrue(hasattr(checker, "is_there_provider_ca"),
- "missing meth")
- self.assertTrue(hasattr(checker, "is_https_working"), "missing meth")
- self.assertTrue(hasattr(checker, "check_new_cert_needed"),
- "missing meth")
-
- def test_checker_should_actually_call_all_tests(self):
- checker = eipchecks.ProviderCertChecker()
-
- mc = Mock()
- checker.run_all(checker=mc)
- # XXX MVS+
- #self.assertTrue(mc.download_ca_cert.called, "not called")
- #self.assertTrue(mc.download_ca_signature.called, "not called")
- #self.assertTrue(mc.get_ca_signatures.called, "not called")
- #self.assertTrue(mc.is_there_trust_path.called, "not called")
-
- # For MVS
- self.assertTrue(mc.is_there_provider_ca.called, "not called")
- self.assertTrue(mc.is_https_working.called,
- "not called")
- self.assertTrue(mc.check_new_cert_needed.called,
- "not called")
-
- # test individual check methods
-
- @unittest.skip
- def test_is_there_provider_ca(self):
- # XXX commenting out this test.
- # With the generic client this does not make sense,
- # we should dump one there.
- # or test conductor logic.
- checker = eipchecks.ProviderCertChecker()
- self.assertTrue(
- checker.is_there_provider_ca())
-
-
-class ProviderCertCheckerHTTPSTests(BaseHTTPSServerTestCase, BaseLeapTest):
- provider = "testprovider.example.org"
-
- class request_handler(NoLogRequestHandler, BaseHTTPRequestHandler):
- responses = {
- '/': ['OK', ''],
- '/client.cert': [
- # XXX get sample cert
- '-----BEGIN CERTIFICATE-----',
- '-----END CERTIFICATE-----'],
- '/badclient.cert': [
- 'BADCERT']}
-
- def do_GET(self):
- path = urlparse.urlparse(self.path)
- message = '\n'.join(self.responses.get(
- path.path, None))
- self.send_response(200)
- self.end_headers()
- self.wfile.write(message)
-
- def test_is_https_working(self):
- fetcher = requests
- uri = "https://%s/" % (self.get_server())
- # bare requests call. this should just pass (if there is
- # an https service there).
- fetcher.get(uri, verify=False)
- checker = eipchecks.ProviderCertChecker(fetcher=fetcher)
- self.assertTrue(checker.is_https_working(uri=uri, verify=False))
-
- # for local debugs, when in doubt
- #self.assertTrue(checker.is_https_working(uri="https://github.com",
- #verify=True))
-
- # for the two checks below, I know they fail because no ca
- # cert is passed to them, and I know that's the error that
- # requests returns with our implementation.
- # We're receiving this because our
- # server is dying prematurely when the handshake is interrupted on the
- # client side.
- # Since we have access to the server, we could check that
- # the error raised has been:
- # SSL23_READ_BYTES: alert bad certificate
- with self.assertRaises(requests.exceptions.SSLError) as exc:
- fetcher.get(uri, verify=True)
- self.assertTrue(
- "SSL23_GET_SERVER_HELLO:unknown protocol" in exc.message)
-
- # XXX FIXME! Uncomment after #638 is done
- #with self.assertRaises(eipexceptions.EIPBadCertError) as exc:
- #checker.is_https_working(uri=uri, verify=True)
- #self.assertTrue(
- #"cert verification failed" in exc.message)
-
- # get cacert from testing.https_server
- cacert = where_cert('cacert.pem')
- fetcher.get(uri, verify=cacert)
- self.assertTrue(checker.is_https_working(uri=uri, verify=cacert))
-
- # same, but get cacert from leap.custom
- # XXX TODO!
-
- @unittest.skip
- def test_download_new_client_cert(self):
- # FIXME
- # Magick srp decorator broken right now...
- # Have to mock the decorator and inject something that
- # can bypass the authentication
-
- uri = "https://%s/client.cert" % (self.get_server())
- cacert = where_cert('cacert.pem')
- checker = eipchecks.ProviderCertChecker(domain=self.provider)
- credentials = "testuser", "testpassword"
- self.assertTrue(checker.download_new_client_cert(
- credentials=credentials, uri=uri, verify=cacert))
-
- # now download a malformed cert
- uri = "https://%s/badclient.cert" % (self.get_server())
- cacert = where_cert('cacert.pem')
- checker = eipchecks.ProviderCertChecker()
- with self.assertRaises(ValueError):
- self.assertTrue(checker.download_new_client_cert(
- credentials=credentials, uri=uri, verify=cacert))
-
- # did we write cert to its path?
- clientcertfile = eipspecs.client_cert_path()
- self.assertTrue(os.path.isfile(clientcertfile))
- certfile = eipspecs.client_cert_path()
- with open(certfile, 'r') as cf:
- certcontent = cf.read()
- self.assertEqual(certcontent,
- '\n'.join(
- self.request_handler.responses['/client.cert']))
- os.remove(clientcertfile)
-
- def test_is_cert_valid(self):
- checker = eipchecks.ProviderCertChecker()
- # TODO: better exception catching
- # should raise eipexceptions.BadClientCertificate, and give reasons
- # on msg.
- with self.assertRaises(Exception) as exc:
- self.assertFalse(checker.is_cert_valid())
- exc.message = "missing cert"
-
- def test_bad_validity_certs(self):
- checker = eipchecks.ProviderCertChecker()
- certfile = where_cert('leaptestscert.pem')
- self.assertFalse(checker.is_cert_not_expired(
- certfile=certfile,
- now=lambda: time.mktime((2038, 1, 1, 1, 1, 1, 1, 1, 1))))
- self.assertFalse(checker.is_cert_not_expired(
- certfile=certfile,
- now=lambda: time.mktime((1970, 1, 1, 1, 1, 1, 1, 1, 1))))
-
- def test_check_new_cert_needed(self):
- # check: missing cert
- checker = eipchecks.ProviderCertChecker(domain=self.provider)
- self.assertTrue(checker.check_new_cert_needed(skip_download=True))
- # TODO check: malformed cert
- # TODO check: expired cert
- # TODO check: pass test server uri instead of skip
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/src/leap/eip/tests/test_config.py b/src/leap/eip/tests/test_config.py
deleted file mode 100644
index 72ab3c8e..00000000
--- a/src/leap/eip/tests/test_config.py
+++ /dev/null
@@ -1,298 +0,0 @@
-from collections import OrderedDict
-import json
-import os
-import platform
-import stat
-
-try:
- import unittest2 as unittest
-except ImportError:
- import unittest
-
-#from leap.base import constants
-#from leap.eip import config as eip_config
-#from leap import __branding as BRANDING
-from leap.eip import config as eipconfig
-from leap.eip.tests.data import EIP_SAMPLE_CONFIG, EIP_SAMPLE_SERVICE
-from leap.testing.basetest import BaseLeapTest
-from leap.util.fileutil import mkdir_p, mkdir_f
-
-_system = platform.system()
-
-#PROVIDER = BRANDING.get('provider_domain')
-#PROVIDER_SHORTNAME = BRANDING.get('short_name')
-
-
-class EIPConfigTest(BaseLeapTest):
-
- __name__ = "eip_config_tests"
- provider = "testprovider.example.org"
-
- maxDiff = None
-
- def setUp(self):
- pass
-
- def tearDown(self):
- pass
-
- #
- # helpers
- #
-
- def touch_exec(self):
- path = os.path.join(
- self.tempdir, 'bin')
- mkdir_p(path)
- tfile = os.path.join(
- path,
- 'openvpn')
- open(tfile, 'wb').close()
- os.chmod(tfile, stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
-
- def write_sample_eipservice(self, vpnciphers=False, extra_vpnopts=None,
- gateways=None):
- conf = eipconfig.EIPServiceConfig()
- mkdir_f(conf.filename)
- if gateways:
- EIP_SAMPLE_SERVICE['gateways'] = gateways
- if vpnciphers:
- openvpnconfig = OrderedDict({
- "auth": "SHA1",
- "cipher": "AES-128-CBC",
- "tls-cipher": "DHE-RSA-AES128-SHA"})
- if extra_vpnopts:
- for k, v in extra_vpnopts.items():
- openvpnconfig[k] = v
- EIP_SAMPLE_SERVICE['openvpn_configuration'] = openvpnconfig
-
- with open(conf.filename, 'w') as fd:
- fd.write(json.dumps(EIP_SAMPLE_SERVICE))
-
- def write_sample_eipconfig(self):
- conf = eipconfig.EIPConfig()
- folder, f = os.path.split(conf.filename)
- if not os.path.isdir(folder):
- mkdir_p(folder)
- with open(conf.filename, 'w') as fd:
- fd.write(json.dumps(EIP_SAMPLE_CONFIG))
-
- def get_expected_openvpn_args(self, with_openvpn_ciphers=False):
- """
- yeah, this almost duplicates the
- code for building the command
- """
- args = []
- eipconf = eipconfig.EIPConfig(domain=self.provider)
- eipconf.load()
- eipsconf = eipconfig.EIPServiceConfig(domain=self.provider)
- eipsconf.load()
-
- username = self.get_username()
- groupname = self.get_groupname()
-
- args.append('--client')
- args.append('--dev')
- #does this have to be tap for win??
- args.append('tun')
- args.append('--persist-tun')
- args.append('--persist-key')
- args.append('--remote')
-
- args.append('%s' % eipconfig.get_eip_gateway(
- eipconfig=eipconf,
- eipserviceconfig=eipsconf))
- # XXX get port!?
- args.append('1194')
- # XXX get proto
- args.append('udp')
- args.append('--tls-client')
- args.append('--remote-cert-tls')
- args.append('server')
-
- if with_openvpn_ciphers:
- CIPHERS = [
- "--tls-cipher", "DHE-RSA-AES128-SHA",
- "--cipher", "AES-128-CBC",
- "--auth", "SHA1"]
- for opt in CIPHERS:
- args.append(opt)
-
- args.append('--user')
- args.append(username)
- args.append('--group')
- args.append(groupname)
- args.append('--management-client-user')
- args.append(username)
- args.append('--management-signal')
-
- args.append('--management')
- #XXX hey!
- #get platform switches here!
- args.append('/tmp/test.socket')
- args.append('unix')
-
- args.append('--script-security')
- args.append('2')
-
- if _system == "Linux":
- UPDOWN_SCRIPT = "/etc/leap/resolv-update"
- if os.path.isfile(UPDOWN_SCRIPT):
- args.append('--up')
- args.append('/etc/leap/resolv-update')
- args.append('--down')
- args.append('/etc/leap/resolv-update')
- args.append('--plugin')
- args.append('/usr/lib/openvpn/openvpn-down-root.so')
- args.append("'script_type=down /etc/leap/resolv-update'")
-
- # certs
- # XXX get values from specs?
- args.append('--cert')
- args.append(os.path.join(
- self.home,
- '.config', 'leap', 'providers',
- '%s' % self.provider,
- 'keys', 'client',
- 'openvpn.pem'))
- args.append('--key')
- args.append(os.path.join(
- self.home,
- '.config', 'leap', 'providers',
- '%s' % self.provider,
- 'keys', 'client',
- 'openvpn.pem'))
- args.append('--ca')
- args.append(os.path.join(
- self.home,
- '.config', 'leap', 'providers',
- '%s' % self.provider,
- 'keys', 'ca',
- 'cacert.pem'))
- return args
-
- # build command string
- # these tests are going to have to check
- # many combinations. we should inject some
- # params in the function call, to disable
- # some checks.
-
- def test_get_eip_gateway(self):
- self.write_sample_eipconfig()
- eipconf = eipconfig.EIPConfig(domain=self.provider)
-
- # default eipservice
- self.write_sample_eipservice()
- eipsconf = eipconfig.EIPServiceConfig(domain=self.provider)
-
- gateway = eipconfig.get_eip_gateway(
- eipconfig=eipconf,
- eipserviceconfig=eipsconf)
-
- # per the spec, the default is the local gateway
- self.assertEqual(gateway, '127.0.0.1')
-
- # change eipservice
- # right now we only check that cluster == selected primary gw in
- # eip.json, and pick first matching ip
- eipconf._config.config['primary_gateway'] = "foo_provider"
- newgateways = [{"cluster": "foo_provider",
- "ip_address": "127.0.0.99"}]
- self.write_sample_eipservice(gateways=newgateways)
- eipsconf = eipconfig.EIPServiceConfig(domain=self.provider)
- # load from disk file
- eipsconf.load()
-
- gateway = eipconfig.get_eip_gateway(
- eipconfig=eipconf,
- eipserviceconfig=eipsconf)
- self.assertEqual(gateway, '127.0.0.99')
-
- # change eipservice, several gateways
- # right now we only check that cluster == selected primary gw in
- # eip.json, and pick first matching ip
- eipconf._config.config['primary_gateway'] = "bar_provider"
- newgateways = [{"cluster": "foo_provider",
- "ip_address": "127.0.0.99"},
- {'cluster': "bar_provider",
- "ip_address": "127.0.0.88"}]
- self.write_sample_eipservice(gateways=newgateways)
- eipsconf = eipconfig.EIPServiceConfig(domain=self.provider)
- # load from disk file
- eipsconf.load()
-
- gateway = eipconfig.get_eip_gateway(
- eipconfig=eipconf,
- eipserviceconfig=eipsconf)
- self.assertEqual(gateway, '127.0.0.88')
-
- def test_build_ovpn_command_empty_config(self):
- self.touch_exec()
- self.write_sample_eipservice()
- self.write_sample_eipconfig()
-
- from leap.eip import config as eipconfig
- from leap.util.fileutil import which
- path = os.environ['PATH']
- vpnbin = which('openvpn', path=path)
- #print 'path =', path
- #print 'vpnbin = ', vpnbin
- vpncommand, vpnargs = eipconfig.build_ovpn_command(
- do_pkexec_check=False, vpnbin=vpnbin,
- socket_path="/tmp/test.socket",
- provider=self.provider)
- self.assertEqual(vpncommand, self.home + '/bin/openvpn')
- self.assertEqual(vpnargs, self.get_expected_openvpn_args())
-
- def test_build_ovpn_command_openvpnoptions(self):
- self.touch_exec()
-
- from leap.eip import config as eipconfig
- from leap.util.fileutil import which
- path = os.environ['PATH']
- vpnbin = which('openvpn', path=path)
-
- self.write_sample_eipconfig()
-
- # regular run, everything normal
- self.write_sample_eipservice(vpnciphers=True)
- vpncommand, vpnargs = eipconfig.build_ovpn_command(
- do_pkexec_check=False, vpnbin=vpnbin,
- socket_path="/tmp/test.socket",
- provider=self.provider)
- self.assertEqual(vpncommand, self.home + '/bin/openvpn')
- expected = self.get_expected_openvpn_args(
- with_openvpn_ciphers=True)
- self.assertEqual(vpnargs, expected)
-
- # bad options -- illegal options
- self.write_sample_eipservice(
- vpnciphers=True,
- # WE ONLY ALLOW vpn options in auth, cipher, tls-cipher
- extra_vpnopts={"notallowedconfig": "badvalue"})
- vpncommand, vpnargs = eipconfig.build_ovpn_command(
- do_pkexec_check=False, vpnbin=vpnbin,
- socket_path="/tmp/test.socket",
- provider=self.provider)
- self.assertEqual(vpncommand, self.home + '/bin/openvpn')
- expected = self.get_expected_openvpn_args(
- with_openvpn_ciphers=True)
- self.assertEqual(vpnargs, expected)
-
- # bad options -- illegal chars
- self.write_sample_eipservice(
- vpnciphers=True,
- # WE ONLY ALLOW A-Z, 0-9 and "-"
- extra_vpnopts={"cipher": "AES-128-CBC;FOOTHING"})
- vpncommand, vpnargs = eipconfig.build_ovpn_command(
- do_pkexec_check=False, vpnbin=vpnbin,
- socket_path="/tmp/test.socket",
- provider=self.provider)
- self.assertEqual(vpncommand, self.home + '/bin/openvpn')
- expected = self.get_expected_openvpn_args(
- with_openvpn_ciphers=True)
- self.assertEqual(vpnargs, expected)
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/src/leap/eip/tests/test_eipconnection.py b/src/leap/eip/tests/test_eipconnection.py
deleted file mode 100644
index 163f8d45..00000000
--- a/src/leap/eip/tests/test_eipconnection.py
+++ /dev/null
@@ -1,216 +0,0 @@
-import glob
-import logging
-import platform
-#import os
-import shutil
-
-logging.basicConfig()
-logger = logging.getLogger(name=__name__)
-
-try:
- import unittest2 as unittest
-except ImportError:
- import unittest
-
-from mock import Mock, patch # MagicMock
-
-from leap.eip.eipconnection import EIPConnection
-from leap.eip.exceptions import ConnectionRefusedError
-from leap.eip import specs as eipspecs
-from leap.testing.basetest import BaseLeapTest
-
-_system = platform.system()
-
-PROVIDER = "testprovider.example.org"
-
-
-class NotImplementedError(Exception):
- pass
-
-
-@patch('OpenVPNConnection._get_or_create_config')
-@patch('OpenVPNConnection._set_ovpn_command')
-class MockedEIPConnection(EIPConnection):
-
- def _set_ovpn_command(self):
- self.command = "mock_command"
- self.args = [1, 2, 3]
-
-
-class EIPConductorTest(BaseLeapTest):
-
- __name__ = "eip_conductor_tests"
- provider = PROVIDER
-
- def setUp(self):
- # XXX there's a conceptual/design
- # mistake here.
- # If we're testing just attrs after init,
- # init should not be doing so many side effects.
-
- # for instance:
- # We have to TOUCH a keys file because
- # we're triggering the key checks FROM
- # the constructor. me not like that,
- # key checker should be called explicitly instead.
-
- # XXX change to keys_checker invocation
- # (see config_checker)
-
- keyfiles = (eipspecs.provider_ca_path(domain=self.provider),
- eipspecs.client_cert_path(domain=self.provider))
- for filepath in keyfiles:
- self.touch(filepath)
- self.chmod600(filepath)
-
- # we init the manager with only
- # some methods mocked
- self.manager = Mock(name="openvpnmanager_mock")
- self.con = MockedEIPConnection()
- self.con.provider = self.provider
-
- # XXX watch out. This sometimes is throwing the following error:
- # NoSuchProcess: process no longer exists (pid=6571)
- # because of a bad implementation of _check_if_running_instance
-
- self.con.run_openvpn_checks()
-
- def tearDown(self):
- pass
-
- def doCleanups(self):
- super(BaseLeapTest, self).doCleanups()
- self.cleanupSocketDir()
- del self.con
-
- def cleanupSocketDir(self):
- ptt = ('/tmp/leap-tmp*')
- for tmpdir in glob.glob(ptt):
- shutil.rmtree(tmpdir)
-
- #
- # tests
- #
-
- def test_vpnconnection_defaults(self):
- """
- default attrs as expected
- """
- con = self.con
- self.assertEqual(con.autostart, True)
- # XXX moar!
-
- def test_ovpn_command(self):
- """
- set_ovpn_command called
- """
- self.assertEqual(self.con.command,
- "mock_command")
- self.assertEqual(self.con.args,
- [1, 2, 3])
-
- # config checks
-
- def test_config_checked_called(self):
- # XXX this single test is taking half of the time
- # needed to run tests. (roughly 3 secs for this only)
- # We should modularize and inject Mocks on more places.
-
- oldcon = self.con
- del(self.con)
- config_checker = Mock()
- self.con = MockedEIPConnection(config_checker=config_checker)
- self.assertTrue(config_checker.called)
- self.con.run_checks()
- self.con.config_checker.run_all.assert_called_with(
- skip_download=False)
-
- # XXX test for cert_checker also
- self.con = oldcon
-
- # connect/disconnect calls
-
- def test_disconnect(self):
- """
- disconnect method calls private and changes status
- """
- self.con._disconnect = Mock(
- name="_disconnect")
-
- # first we set status to connected
- self.con.status.set_current(self.con.status.CONNECTED)
- self.assertEqual(self.con.status.current,
- self.con.status.CONNECTED)
-
- # disconnect
- self.con.terminate_openvpn_connection = Mock()
- self.con.disconnect()
- self.con.terminate_openvpn_connection.assert_called_once_with(
- shutdown=False)
- self.con.terminate_openvpn_connection = Mock()
- self.con.disconnect(shutdown=True)
- self.con.terminate_openvpn_connection.assert_called_once_with(
- shutdown=True)
-
- # new status should be disconnected
- # XXX this should evolve and check no errors
- # during disconnection
- self.assertEqual(self.con.status.current,
- self.con.status.DISCONNECTED)
-
- def test_connect(self):
- """
- connect calls _launch_openvpn private
- """
- self.con._launch_openvpn = Mock()
- self.con.connect()
- self.con._launch_openvpn.assert_called_once_with()
-
- # XXX tests breaking here ...
-
- def test_good_poll_connection_state(self):
- """
- """
- #@patch --
- # self.manager.get_connection_state
-
- #XXX review this set of poll_state tests
- #they SHOULD NOT NEED TO MOCK ANYTHING IN THE
- #lower layers!! -- status, vpn_manager..
- #right now we're testing implementation, not
- #behavior!!!
- good_state = ["1345466946", "unknown_state", "ok",
- "192.168.1.1", "192.168.1.100"]
- self.con.get_connection_state = Mock(return_value=good_state)
- self.con.status.set_vpn_state = Mock()
-
- state = self.con.poll_connection_state()
- good_state[1] = "disconnected"
- final_state = tuple(good_state)
- self.con.status.set_vpn_state.assert_called_with("unknown_state")
- self.assertEqual(state, final_state)
-
- # TODO between "good" and "bad" (exception raised) cases,
- # we can still test for malformed states and see that only good
- # states do have a change (and from only the expected transition
- # states).
-
- def test_bad_poll_connection_state(self):
- """
- get connection state raises ConnectionRefusedError
- state is None
- """
- self.con.get_connection_state = Mock(
- side_effect=ConnectionRefusedError('foo!'))
- state = self.con.poll_connection_state()
- self.assertEqual(state, None)
-
-
- # XXX more things to test:
- # - called config routines during initz.
- # - raising proper exceptions with no config
- # - called proper checks on config / permissions
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/src/leap/eip/tests/test_openvpnconnection.py b/src/leap/eip/tests/test_openvpnconnection.py
deleted file mode 100644
index 95bfb2f0..00000000
--- a/src/leap/eip/tests/test_openvpnconnection.py
+++ /dev/null
@@ -1,161 +0,0 @@
-import logging
-import os
-import platform
-import psutil
-import shutil
-#import socket
-
-logging.basicConfig()
-logger = logging.getLogger(name=__name__)
-
-try:
- import unittest2 as unittest
-except ImportError:
- import unittest
-
-from mock import Mock, patch # MagicMock
-
-from leap.eip import config as eipconfig
-from leap.eip import openvpnconnection
-from leap.eip import exceptions as eipexceptions
-from leap.eip.udstelnet import UDSTelnet
-from leap.testing.basetest import BaseLeapTest
-
-_system = platform.system()
-
-
-class NotImplementedError(Exception):
- pass
-
-
-mock_UDSTelnet = Mock(spec=UDSTelnet)
-# XXX cautious!!!
-# this might be fragile right now (counting a global
-# reference of calls I think.
-# investigate this other form instead:
-# http://www.voidspace.org.uk/python/mock/patch.html#start-and-stop
-
-# XXX redo after merge-refactor
-
-
-@patch('openvpnconnection.OpenVPNConnection.connect_to_management')
-class MockedOpenVPNConnection(openvpnconnection.OpenVPNConnection):
- def __init__(self, *args, **kwargs):
- self.mock_UDSTelnet = Mock()
- super(MockedOpenVPNConnection, self).__init__(
- *args, **kwargs)
- self.tn = self.mock_UDSTelnet(self.host, self.port)
-
- def connect_to_management(self):
- #print 'patched connect'
- self.tn = mock_UDSTelnet(self.host, port=self.port)
-
-
-class OpenVPNConnectionTest(BaseLeapTest):
-
- __name__ = "vpnconnection_tests"
-
- def setUp(self):
- # XXX this will have to change for win, host=localhost
- host = eipconfig.get_socket_path()
- self.host = host
- self.manager = MockedOpenVPNConnection(host=host)
-
- def tearDown(self):
- pass
-
- def doCleanups(self):
- super(BaseLeapTest, self).doCleanups()
- self.cleanupSocketDir()
-
- def cleanupSocketDir(self):
- # remove the socket folder.
- # XXX only if posix. in win, host is localhost, so nothing
- # has to be done.
- if self.host:
- folder, fpath = os.path.split(self.host)
- try:
- assert folder.startswith('/tmp/leap-tmp') # safety check
- shutil.rmtree(folder)
- except:
- self.fail("could not remove temp file")
-
- del self.manager
-
- #
- # tests
- #
-
- def test_detect_vpn(self):
- # XXX review: not sure if all the logic was captured
- # while fixing this. kali.
- openvpn_connection = openvpnconnection.OpenVPNConnection()
-
- with patch.object(psutil, "process_iter") as mocked_psutil:
- mocked_process = Mock()
- mocked_process.name = "openvpn"
- mocked_process.cmdline = ["openvpn", "-foo", "-bar", "-gaaz"]
- mocked_psutil.return_value = [mocked_process]
- with self.assertRaises(eipexceptions.OpenVPNAlreadyRunning):
- openvpn_connection._check_if_running_instance()
-
- openvpn_connection._check_if_running_instance()
-
- @unittest.skipIf(_system == "Windows", "lin/mac only")
- def test_lin_mac_default_init(self):
- """
- check default host for management iface
- """
- self.assertTrue(self.manager.host.startswith('/tmp/leap-tmp'))
- self.assertEqual(self.manager.port, 'unix')
-
- @unittest.skipUnless(_system == "Windows", "win only")
- def test_win_default_init(self):
- """
- check default host for management iface
- """
- # XXX should we make the platform specific switch
- # here or in the vpn command string building?
- self.assertEqual(self.manager.host, 'localhost')
- self.assertEqual(self.manager.port, 7777)
-
- def test_port_types_init(self):
- oldmanager = self.manager
- self.manager = MockedOpenVPNConnection(port="42")
- self.assertEqual(self.manager.port, 42)
- self.manager = MockedOpenVPNConnection()
- self.assertEqual(self.manager.port, "unix")
- self.manager = MockedOpenVPNConnection(port="bad")
- self.assertEqual(self.manager.port, None)
- self.manager = oldmanager
-
- def test_uds_telnet_called_on_connect(self):
- self.manager.connect_to_management()
- mock_UDSTelnet.assert_called_with(
- self.manager.host,
- port=self.manager.port)
-
- @unittest.skip
- def test_connect(self):
- raise NotImplementedError
- # XXX calls close
- # calls UDSTelnet mock.
-
- # XXX
- # tests to write:
- # UDSTelnetTest (for real?)
- # HAVE A LOOK AT CORE TESTS FOR TELNETLIB.
- # very illustrative instead...
-
- # - raise MissingSocket
- # - raise ConnectionRefusedError
- # - test send command
- # - tries connect
- # - ... tries?
- # - ... calls _seek_to_eof
- # - ... read_until --> return value
- # - ...
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/src/leap/eip/udstelnet.py b/src/leap/eip/udstelnet.py
deleted file mode 100644
index 18e927c2..00000000
--- a/src/leap/eip/udstelnet.py
+++ /dev/null
@@ -1,38 +0,0 @@
-import os
-import socket
-import telnetlib
-
-from leap.eip import exceptions as eip_exceptions
-
-
-class UDSTelnet(telnetlib.Telnet):
- """
- a telnet-alike class that can listen
- on unix domain sockets
- """
-
- def open(self, host, port=23, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
- """Connect to a host. If port is 'unix', it
- will open a connection over unix domain sockets.
-
- The optional second argument is the port number, which
- defaults to the standard telnet port (23).
-
- Don't try to reopen an already connected instance.
- """
- self.eof = 0
- self.host = host
- self.port = port
- self.timeout = timeout
-
- if self.port == "unix":
- # unix sockets spoken
- if not os.path.exists(self.host):
- raise eip_exceptions.MissingSocketError
- self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
- try:
- self.sock.connect(self.host)
- except socket.error:
- raise eip_exceptions.ConnectionRefusedError
- else:
- self.sock = socket.create_connection((host, port), timeout)
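
For reference, UDSTelnet above is what the OpenVPN-connection code uses to drive OpenVPN's management interface over a unix domain socket. A minimal usage sketch, assuming an existing management socket; the path and the 'state' command below are illustrative, not taken from the removed code:

    from leap.eip.udstelnet import UDSTelnet

    # illustrative management socket path; the client derives the real one
    # from eipconfig.get_socket_path()
    SOCKET_PATH = '/tmp/leap-example/openvpn.socket'

    tn = UDSTelnet()
    tn.open(SOCKET_PATH, port='unix')   # 'unix' selects the AF_UNIX branch above
    tn.write('state\n')                 # any OpenVPN management command
    print(tn.read_until('END', 2))      # management replies end with 'END'
    tn.close()
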
diff --git a/src/leap/email/__init__.py b/src/leap/email/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/src/leap/email/__init__.py
+++ /dev/null
diff --git a/src/leap/email/smtp/README b/src/leap/email/smtp/README
deleted file mode 100644
index 2b2a1180..00000000
--- a/src/leap/email/smtp/README
+++ /dev/null
@@ -1,43 +0,0 @@
-Leap SMTP Relay
-===============
-
-Outgoing mail workflow:
-
- * LEAP client runs a thin SMTP proxy on the user's device, bound to
- localhost.
- * The user's MUA is configured to use localhost as its outgoing SMTP server
- * When the SMTP proxy receives an email from the MUA:
- * The SMTP proxy queries the Key Manager for the user's private key and the
- public keys of all recipients
- * The message is signed by the sender and encrypted to the recipients.
- * If a recipient's key is missing, the email goes out in cleartext (unless
- the user has configured the option to send only encrypted email)
- * Finally, the message is relayed to the provider's SMTP relay
-
-
-Dependencies
-------------
-
-Leap SMTP Relay depends on the following python libraries:
-
- * Twisted 12.3.0 [1]
- * zope.interface 4.0.3 [2]
-
-[1] http://pypi.python.org/pypi/Twisted/12.3.0
-[2] http://pypi.python.org/pypi/zope.interface/4.0.3
-
-
-How to run
-----------
-
-To launch the SMTP relay, run the following command:
-
- twistd -y smtprelay.tac
-
-
-Running tests
--------------
-
-Tests are run using Twisted's Trial API, like this:
-
- trial leap.email.smtp.tests
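
The encrypt step of the workflow above is what GPGWrapper and EncryptedMessage in smtprelay.py implement on top of python-gnupg. A minimal standalone sketch of that step, assuming a keyring that already holds the recipient's public key; the keyring location and recipient address are illustrative:

    import os
    import gnupg

    # illustrative values -- the relay reads these from its own keyring/config
    GNUPG_HOME = os.path.expanduser('~/.config/leap/gnupg')
    RECIPIENT = 'leap@leap.se'

    gpg = gnupg.GPG(gnupghome=GNUPG_HOME)

    # look up the recipient's key by uid, as GPGWrapper.find_key does
    keys = [key for key in gpg.list_keys()
            if any(RECIPIENT in uid for uid in key['uids'])]
    if not keys:
        raise LookupError('GnuPG public key for %s not found!' % RECIPIENT)

    # encrypt to the recipient's fingerprint; the relay then hands the
    # armored ciphertext to Twisted for delivery to the provider's relay
    ciphertext = str(gpg.encrypt('This is a secret message.',
                                 keys[0]['fingerprint'],
                                 always_trust=True))
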
diff --git a/src/leap/email/smtp/__init__.py b/src/leap/email/smtp/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/src/leap/email/smtp/__init__.py
+++ /dev/null
diff --git a/src/leap/email/smtp/smtprelay.py b/src/leap/email/smtp/smtprelay.py
deleted file mode 100644
index fdb8eb91..00000000
--- a/src/leap/email/smtp/smtprelay.py
+++ /dev/null
@@ -1,207 +0,0 @@
-import re
-import gnupg
-from zope.interface import implements
-from StringIO import StringIO
-from twisted.mail import smtp
-from twisted.internet.protocol import ServerFactory
-from twisted.internet import reactor
-from twisted.internet import defer
-from twisted.application import internet, service
-from twisted.python import log
-from email.Header import Header
-
-
-class SMTPFactory(ServerFactory):
- """
- Factory for an SMTP server with encrypted relaying capabilities.
- """
-
- def __init__(self, gpg=None):
- self._gpg = gpg
-
- def buildProtocol(self, addr):
- "Return a protocol suitable for the job."
- # TODO: use ESMTP here.
- smtpProtocol = smtp.SMTP(SMTPDelivery(self._gpg))
- smtpProtocol.factory = self
- return smtpProtocol
-
-
-class SMTPDelivery(object):
- """
- Validate email addresses and handle message delivery.
- """
-
- implements(smtp.IMessageDelivery)
-
- def __init__(self, gpg=None):
- if gpg:
- self._gpg = gpg
- else:
- self._gpg = GPGWrapper()
-
- def receivedHeader(self, helo, origin, recipients):
- myHostname, clientIP = helo
- headerValue = "by %s from %s with ESMTP ; %s" % (
- myHostname, clientIP, smtp.rfc822date())
- # email.Header.Header used for automatic wrapping of long lines
- return "Received: %s" % Header(headerValue)
-
- def validateTo(self, user):
- """Assert existence of and trust on recipient's GPG public key."""
- # try to find recipient's public key
- try:
- # this will raise an exception if key is not found
- trust = self._gpg.find_key(user.dest.addrstr)['trust']
- # if the key is not ultimately trusted, then the message will not
- # be encrypted. So, we check for this below
- #if trust != 'u':
- # raise smtp.SMTPBadRcpt(user)
- log.msg("Accepting mail for %s..." % user.dest)
- return lambda: EncryptedMessage(user, gpg=self._gpg)
- except LookupError:
- raise smtp.SMTPBadRcpt(user)
-
- def validateFrom(self, helo, originAddress):
- # accept mail from anywhere. To reject an address, raise
- # smtp.SMTPBadSender here.
- return originAddress
-
-
-class EncryptedMessage():
- """
- Receive plaintext from client, encrypt it and send message to a
- recipient.
- """
- implements(smtp.IMessage)
-
- SMTP_HOSTNAME = "mail.riseup.net"
- SMTP_PORT = 25
-
- def __init__(self, user, gpg=None):
- self.user = user
- self.getSMTPInfo()
- self.lines = []
- if gpg:
- self._gpg = gpg
- else:
- self._gpg = GPGWrapper()
-
- def lineReceived(self, line):
- """Store email DATA lines as they arrive."""
- self.lines.append(line)
-
- def eomReceived(self):
- """Encrypt and send message."""
- log.msg("Message data complete.")
- self.lines.append('') # add a trailing newline
- self.parseMessage()
- try:
- self.encrypt()
- return self.sendMessage()
- except LookupError:
- return None
-
- def parseMessage(self):
- """Separate message headers from body."""
- sep = self.lines.index('')
- self.headers = self.lines[:sep]
- self.body = self.lines[sep + 1:]
-
- def connectionLost(self):
- log.msg("Connection lost unexpectedly!")
- log.err()
- # unexpected loss of connection; don't save
- self.lines = []
-
- def sendSuccess(self, r):
- log.msg(r)
-
- def sendError(self, e):
- log.msg(e)
- log.err()
-
- def prepareHeader(self):
- self.headers.insert(1, "From: %s" % self.user.orig.addrstr)
- self.headers.insert(2, "To: %s" % self.user.dest.addrstr)
- self.headers.append('')
-
- def sendMessage(self):
- self.prepareHeader()
- msg = '\n'.join(self.headers + [self.cyphertext])
- d = defer.Deferred()
- factory = smtp.ESMTPSenderFactory(self.smtp_username,
- self.smtp_password,
- self.smtp_username,
- self.user.dest.addrstr,
- StringIO(msg),
- d)
- # the next call is TLS-powered!
- reactor.connectTCP(self.SMTP_HOSTNAME, self.SMTP_PORT, factory)
- d.addCallback(self.sendSuccess)
- d.addErrback(self.sendError)
- return d
-
- def encrypt(self, always_trust=True):
- # TODO: do not "always trust" here.
- fp = self._gpg.find_key(self.user.dest.addrstr)['fingerprint']
- log.msg("Encrypting to %s" % fp)
- self.cyphertext = str(self._gpg.encrypt('\n'.join(self.body), [fp],
- always_trust=always_trust))
-
- # this will be replaced by some other mechanism of obtaining credentials
- # for SMTP server.
- def getSMTPInfo(self):
- #f = open('/media/smtp-info.txt', 'r')
- #self.smtp_host = f.readline().rstrip()
- #self.smtp_port = f.readline().rstrip()
- #self.smtp_username = f.readline().rstrip()
- #self.smtp_password = f.readline().rstrip()
- #f.close()
- self.smtp_host = ''
- self.smtp_port = ''
- self.smtp_username = ''
- self.smtp_password = ''
-
-
-class GPGWrapper():
- """
- This is a temporary class for handling GPG requests, and should be
- replaced by a more general class used throughout the project.
- """
-
- GNUPG_HOME = "~/.config/leap/gnupg"
- GNUPG_BINARY = "/usr/bin/gpg" # TODO: change this based on OS
-
- def __init__(self, gpghome=GNUPG_HOME, gpgbinary=GNUPG_BINARY):
- self.gpg = gnupg.GPG(gnupghome=gpghome, gpgbinary=gpgbinary)
-
- def find_key(self, email):
- """
- Find user's key based on their email.
- """
- for key in self.gpg.list_keys():
- for uid in key['uids']:
- if re.search(email, uid):
- return key
- raise LookupError("GnuPG public key for %s not found!" % email)
-
- def encrypt(self, data, recipient, always_trust=True):
- # TODO: do not 'always_trust'.
- return self.gpg.encrypt(data, recipient, always_trust=always_trust)
-
- def decrypt(self, data):
- return self.gpg.decrypt(data)
-
- def import_keys(self, data):
- return self.gpg.import_keys(data)
-
-
-# service configuration
-port = 25
-factory = SMTPFactory()
-
-# these enable the use of this service with twistd
-application = service.Application("LEAP SMTP Relay")
-service = internet.TCPServer(port, factory)
-service.setServiceParent(application)
diff --git a/src/leap/email/smtp/tests/185CA770.key b/src/leap/email/smtp/tests/185CA770.key
deleted file mode 100644
index 587b4164..00000000
--- a/src/leap/email/smtp/tests/185CA770.key
+++ /dev/null
@@ -1,79 +0,0 @@
------BEGIN PGP PRIVATE KEY BLOCK-----
-Version: GnuPG v1.4.10 (GNU/Linux)
-
-lQIVBFCJNL4BEADFsI1TCD4yq7ZqL7VhdVviTuX6JUps8/mVEhRVOZhojLcTYaqQ
-gs6T6WabRxcK7ymOnf4K8NhYdz6HFoJN46BT87etokx7J/Sl2OhpiqBQEY+jW8Rp
-+3MSGrGmvFw0s1lGrz/cXzM7UNgWSTOnYZ5nJS1veMhy0jseZOUK7ekp2oEDjGZh
-pzgd3zICCR2SvlpLIXB2Nr/CUcuRWTcc5LlKmbjMybu0E/uuY14st3JL+7qI6QX0
-atFm0VhFVpagOl0vWKxakUx4hC7j1wH2ADlCvSZPG0StSLUyHkJx3UPsmYxOZFao
-ATED3Okjwga6E7PJEbzyqAkvzw/M973kaZCUSH75ZV0cQnpdgXV3DK1gSa3d3gug
-W1lE0V7pwnN2NTOYfBMi+WloCs/bp4iZSr4QP1duZ3IqKraeBDCk7MoFo4A9Wk07
-kvqPwF9IBgatu62WVEZIzwyViN+asFUGfgp+8D7gtnlWAw0V6y/lSTzyl+dnLP98
-Hfr2eLBylFs+Kl3Pivpg2uHw09LLCrjeLEN3dj9SfBbA9jDIo9Zhs1voiIK/7Shx
-E0BRJaBgG3C4QaytYEu7RFFOKuvBai9w2Y5OfsKFo8rA7v4dxFFDvzKGujCtNnwf
-oyaGlZmMBU5MUmHUNiG8ON21COZBtK5oMScuY1VC9CQonj3OClg3IbU9SQARAQAB
-/gNlAkdOVQG0JGRyZWJzIChncGcgdGVzdCBrZXkpIDxkcmVic0BsZWFwLnNlPokC
-OAQTAQIAIgUCUIk0vgIbAwYLCQgHAwIGFQgCCQoLBBYCAwECHgECF4AACgkQty9e
-xhhcp3Bdhw//bdPUNbp6rgIjRRuwYvGJ6IuiFuFWJQ0m3iAuuAoZo5GHAPqZAuGk
-dMVYu0dtCtZ68MJ/QpjBCT9RRL+mgIgfLfUSj2ZknP4nb6baiG5u28l0KId/e5IC
-iQKBnIsjxKxhLBVHSzRaS1P+vZeF2C2R9XyNy0eCnAwyCMcD0R8TVROGQ7i4ZQsM
-bMj1LPpOwhV/EGp23nD+upWOVbn/wQHOYV2kMiA/8fizmWRIWsV4/68uMA+WDP4L
-40AnJ0fcs04f9deM9P6pjlm00VD7qklYEGw6Mpr2g/M73kGh1nlAv+ImQBGlLMle
-RXyzHY3WAhzmRKWO4koFuKeR9Q0EMzk2R4/kuagdWEpM+bhwE4xPV1tPZhn9qFTz
-pQD4p/VT4qNQKOD0+aTFWre65Rt2cFFMLI7UmEHNLi0NB9JCIAi4+l+b9WQNlmaO
-C8EhOGwRzmehUyHmXM3BNW28MnyKFJ7bBFMd7uJz+vAPOrr6OzuNvVCv2I2ICkTs
-ihIj/zw5GXxkPO7YbMu9rKG0nKF1N3JB1gUJ78DHmhbjeaGSvHw85sPD0/1dPZK4
-8Gig8i62aCxf8OlJPlt8ZhBBolzs6ITUNa75Rw9fJsj3UWuv2VFaIuR57bFWmY3s
-A9KPgdf7jVQlAZKlVyli7IkyaZmxDZNFQoTdIC9uo0aggIDP8zKv0n2dBz4EUIk0
-vgEQAOO8BAR7sBdqj2RRMRNeWSA4S9GuHfV3YQARnqYsbITs1jRgAo7jx9Z5C80c
-ZOxOUVK7CJjtTqU0JB9QP/zwV9hk5i6y6aQTysclQyTNN10aXu/3zJla5Duhz+Cs
-+5UcVAmNJX9FgTMVvhKDEIY/LNmb9MoBLMut1CkDx+WPCV45WOIBCDdj2HpIjie4
-phs0/65SWjPiVg3WsFZljVxpJCGXP48Eet2bf8afYH1lx3sQMcNbyJACIPtz+YKz
-c7jIKwKSWzg1VyYikbk9eWCxcz6VKNJKi94YH9c7U8X3TdZ8G0kGYUldjYDvesyl
-nuQlcGCtSGKOAhrN/Bu2R0gpFgYl247u79CmjotefMdv8BGUDW6u9/Sep9xN3dW8
-S87h6M/tvs0ChlkDDpJedzCd7ThdikGvFRJfW/8sT/+qoTKskySQaDIeNJnxZuyK
-wELLMBvCZGpamwmnkEGhvuZWq0h/DwyTs4QAE8OVHXJSM3UN7hM4lJIUh+sRKJ1F
-AXXTdSY4cUNaS+OKtj2LJ85zFqhfAZ4pFwLCgYbJtU5hej2LnMJNbYcSkjxbk+c5
-IjkoZRF+ExjZlc0VLYNT57ZriwZ/pX42ofjOyMR/dkHQuFik/4K7v1ZemfaTdm07
-SEMBknR6OZsy/5+viEtXiih3ptTMaT9row+g+cFoxdXkisKvABEBAAH+AwMCIlVK
-Xs3x0Slgwx03cTNIoWXmishkPCJlEEdcjldz2VyQF9hjdp1VIe+npI26chKwCZqm
-U8yYbJh4UBrugUUzKKd4EfnmKfu+/BsJciFRVKwBtiolIiUImzcHPWktYLwo9yzX
-W42teShXXVgWmsJN1/6FqJdsLg8dxWesXMKoaNF4n1P7zx6vKBmDHTRz7PToaI/d
-5/nKrjED7ZT1h+qR5i9UUgbvF0ySp8mlqk/KNqHUSLDB9kf/JDg4XVtPHGGd9Ik/
-60UJ7aDfohi4Z0VgwWmfLBwcQ3It+ENtnPFufH3WHW8c1UA4wVku9tOTqyrRG6tP
-TZGiRfuwsv7Hq3pWT6rntbDkTiVgESM4C1fiZblc98iWUKGXSHqm+te1TwXOUCci
-J/gryXcjQFM8A0rwA/m+EvsoWuzoqIl3x++p3/3/mGux6UD4O7OhJNRVRz+8Mhq1
-ksrR9XkQzpq3Yv3ulTHz7l+WCRRXxw5+XWAkRHHF47Vf/na38NJQHcsCBbRIuLYR
-wBzS48cYzYkF6VejKThdQmdYJ0/fUrlUBCAJWgrfqCihFLDa1s4jJ16/fqi8a97Y
-4raVy2hrF2vFc/wet13hsaddVn4rPRAMDEGdgEmJX7MmU1emT/yaIG9lvjMpI2c5
-ADXGF2yYYa7H8zPIFyHU1RSavlT0S/K9yzIZvv+jA5KbNeGp+WWFT8MLZs0IhoCZ
-d1EgLUYAt7LPUSm2lBy1w/IL+VtYuyn/UVFo2xWiHd1ABiNWl1ji3X9Ki5613QqH
-bvn4z46voCzdZ02rYkAwrdqDr92fiBR8ctwA0AudaG6nf2ztmFKtM3E/RPMkPgKF
-8NHYc7QxS2jruJxXBtjRBMtoIaZ0+AXUO6WuEJrDLDHWaM08WKByQMm808xNCbRr
-CpiK8qyR3SwkfaOMCp22mqViirQ2KfuVvBpBT2pBYlgDKs50nE+stDjUMv+FDKAo
-5NtiyPfNtaBOYnXAEQb/hjjW5bKq7JxHSxIWAYKbNKIWgftJ3ACZAsBMHfaOCFNH
-+XLojAoxOI+0zbN6FtjN+YMU1XrLd6K49v7GEiJQZVQSfLCecVDhDU9paNROA/Xq
-/3nDCTKhd3stTPnc8ymLAwhTP0bSoFh/KtU96D9ZMC2cu9XZ+UcSQYES/ncZWcLw
-wTKrt+VwBG1z3DbV2O0ruUiXTLcZMsrwbUSDx1RVhmKZ0i42AttMdauFQ9JaX2CS
-2ddqFBS1b4X6+VCy44KkpdXsmp0NWMgm/PM3PTisCxrha7bI5/LqfXG0b+GuIFb4
-h/lEA0Ae0gMgkzm3ePAPPVlRj7kFl5Osjxm3YVRW23WWGDRF5ywIROlBjbdozA0a
-MyMgXlG9hhJseIpFveoiwqenNE5Wxg0yQbnhMUTKeCQ0xskG82P+c9bvDsevAQUR
-uv1JAGGxDd1/4nk0M5m9/Gf4Bn0uLAz29LdMg0FFUvAm2ol3U3uChm7OISU8dqFy
-JdCFACKBMzAREiXfgH2TrTxAhpy5uVcUSQV8x5J8qJ/mUoTF1WE3meXEm9CIvIAF
-Mz49KKebLS3zGFixMcKLAOKA+s/tUWO7ZZoJyQjvQVerLyDo6UixVb11LQUJQOXb
-ZIuSKV7deCgBDQ26C42SpF3rHfEQa7XH7j7tl1IIW/9DfYJYVQHaz1NTq6zcjWS2
-e+cUexBPhxbadGn0zelXr6DLJqQT7kaVeYOHlkYUHkZXdHE4CWoHqOboeB02uM/A
-e7nge1rDi57ySrsF4AVl59QJYBPR43AOVbCJAh8EGAECAAkFAlCJNL4CGwwACgkQ
-ty9exhhcp3DetA/8D/IscSBlWY3TjCD2P7t3+X34USK8EFD3QJse9dnCWOLcskFQ
-IoIfhRM752evFu2W9owEvxSQdG+otQAOqL72k1EH2g7LsADuV8I4LOYOnLyeIE9I
-b+CFPBkmzTEzrdYp6ITUU7qqgkhcgnltKGHoektIjxE8gtxCKEdyxkzazum6nCQQ
-kSBZOXVU3ezm+A2QHHP6XT1GEbdKbJ0tIuJR8ADu08pBx2c/LDBBreVStrrt1Dbz
-uR+U8MJsfLVcYX/Rw3V+KA24oLRzg91y3cfi3sNU/kmd5Cw42Tj00B+FXQny51Mq
-s4KyqHobj62II68eL5HRB2pcGsoaedQyxu2cYSeVyarBOiUPNYkoGDJoKdDyZRIB
-NNK0W+ASTf0zeHhrY/okt1ybTVtvbt6wkTEbKVePUaYmNmhre1cAj4uNwFzYjkzJ
-cm+8XWftD+TV8cE5DyVdnF00SPDuPzodRAPXaGpQUMLkE4RPr1TAwcuoPH9aFHZ/
-se6rw6TQHLd0vMk0U/DocikXpSJ1N6caE3lRwI/+nGfXNiCr8MIdofgkBeO86+G7
-k0UXS4v5FKk1nwTyt4PkFJDvAJX6rZPxIZ9NmtA5ao5vyu1DT5IhoXgDzwurAe8+
-R+y6gtA324hXIweFNt7SzYPfI4SAjunlmm8PIBf3owBrk3j+w6EQoaCreK4=
-=6HcJ
------END PGP PRIVATE KEY BLOCK-----
diff --git a/src/leap/email/smtp/tests/185CA770.pub b/src/leap/email/smtp/tests/185CA770.pub
deleted file mode 100644
index 38af19f8..00000000
--- a/src/leap/email/smtp/tests/185CA770.pub
+++ /dev/null
@@ -1,52 +0,0 @@
------BEGIN PGP PUBLIC KEY BLOCK-----
-Version: GnuPG v1.4.10 (GNU/Linux)
-
-mQINBFCJNL4BEADFsI1TCD4yq7ZqL7VhdVviTuX6JUps8/mVEhRVOZhojLcTYaqQ
-gs6T6WabRxcK7ymOnf4K8NhYdz6HFoJN46BT87etokx7J/Sl2OhpiqBQEY+jW8Rp
-+3MSGrGmvFw0s1lGrz/cXzM7UNgWSTOnYZ5nJS1veMhy0jseZOUK7ekp2oEDjGZh
-pzgd3zICCR2SvlpLIXB2Nr/CUcuRWTcc5LlKmbjMybu0E/uuY14st3JL+7qI6QX0
-atFm0VhFVpagOl0vWKxakUx4hC7j1wH2ADlCvSZPG0StSLUyHkJx3UPsmYxOZFao
-ATED3Okjwga6E7PJEbzyqAkvzw/M973kaZCUSH75ZV0cQnpdgXV3DK1gSa3d3gug
-W1lE0V7pwnN2NTOYfBMi+WloCs/bp4iZSr4QP1duZ3IqKraeBDCk7MoFo4A9Wk07
-kvqPwF9IBgatu62WVEZIzwyViN+asFUGfgp+8D7gtnlWAw0V6y/lSTzyl+dnLP98
-Hfr2eLBylFs+Kl3Pivpg2uHw09LLCrjeLEN3dj9SfBbA9jDIo9Zhs1voiIK/7Shx
-E0BRJaBgG3C4QaytYEu7RFFOKuvBai9w2Y5OfsKFo8rA7v4dxFFDvzKGujCtNnwf
-oyaGlZmMBU5MUmHUNiG8ON21COZBtK5oMScuY1VC9CQonj3OClg3IbU9SQARAQAB
-tCRkcmVicyAoZ3BnIHRlc3Qga2V5KSA8ZHJlYnNAbGVhcC5zZT6JAjgEEwECACIF
-AlCJNL4CGwMGCwkIBwMCBhUIAgkKCwQWAgMBAh4BAheAAAoJELcvXsYYXKdwXYcP
-/23T1DW6eq4CI0UbsGLxieiLohbhViUNJt4gLrgKGaORhwD6mQLhpHTFWLtHbQrW
-evDCf0KYwQk/UUS/poCIHy31Eo9mZJz+J2+m2ohubtvJdCiHf3uSAokCgZyLI8Ss
-YSwVR0s0WktT/r2XhdgtkfV8jctHgpwMMgjHA9EfE1UThkO4uGULDGzI9Sz6TsIV
-fxBqdt5w/rqVjlW5/8EBzmFdpDIgP/H4s5lkSFrFeP+vLjAPlgz+C+NAJydH3LNO
-H/XXjPT+qY5ZtNFQ+6pJWBBsOjKa9oPzO95BodZ5QL/iJkARpSzJXkV8sx2N1gIc
-5kSljuJKBbinkfUNBDM5NkeP5LmoHVhKTPm4cBOMT1dbT2YZ/ahU86UA+Kf1U+Kj
-UCjg9PmkxVq3uuUbdnBRTCyO1JhBzS4tDQfSQiAIuPpfm/VkDZZmjgvBIThsEc5n
-oVMh5lzNwTVtvDJ8ihSe2wRTHe7ic/rwDzq6+js7jb1Qr9iNiApE7IoSI/88ORl8
-ZDzu2GzLvayhtJyhdTdyQdYFCe/Ax5oW43mhkrx8PObDw9P9XT2SuPBooPIutmgs
-X/DpST5bfGYQQaJc7OiE1DWu+UcPXybI91Frr9lRWiLkee2xVpmN7APSj4HX+41U
-JQGSpVcpYuyJMmmZsQ2TRUKE3SAvbqNGoICAz/Myr9J9uQINBFCJNL4BEADjvAQE
-e7AXao9kUTETXlkgOEvRrh31d2EAEZ6mLGyE7NY0YAKO48fWeQvNHGTsTlFSuwiY
-7U6lNCQfUD/88FfYZOYusumkE8rHJUMkzTddGl7v98yZWuQ7oc/grPuVHFQJjSV/
-RYEzFb4SgxCGPyzZm/TKASzLrdQpA8fljwleOVjiAQg3Y9h6SI4nuKYbNP+uUloz
-4lYN1rBWZY1caSQhlz+PBHrdm3/Gn2B9Zcd7EDHDW8iQAiD7c/mCs3O4yCsCkls4
-NVcmIpG5PXlgsXM+lSjSSoveGB/XO1PF903WfBtJBmFJXY2A73rMpZ7kJXBgrUhi
-jgIazfwbtkdIKRYGJduO7u/Qpo6LXnzHb/ARlA1urvf0nqfcTd3VvEvO4ejP7b7N
-AoZZAw6SXncwne04XYpBrxUSX1v/LE//qqEyrJMkkGgyHjSZ8WbsisBCyzAbwmRq
-WpsJp5BBob7mVqtIfw8Mk7OEABPDlR1yUjN1De4TOJSSFIfrESidRQF103UmOHFD
-WkvjirY9iyfOcxaoXwGeKRcCwoGGybVOYXo9i5zCTW2HEpI8W5PnOSI5KGURfhMY
-2ZXNFS2DU+e2a4sGf6V+NqH4zsjEf3ZB0LhYpP+Cu79WXpn2k3ZtO0hDAZJ0ejmb
-Mv+fr4hLV4ood6bUzGk/a6MPoPnBaMXV5IrCrwARAQABiQIfBBgBAgAJBQJQiTS+
-AhsMAAoJELcvXsYYXKdw3rQP/A/yLHEgZVmN04wg9j+7d/l9+FEivBBQ90CbHvXZ
-wlji3LJBUCKCH4UTO+dnrxbtlvaMBL8UkHRvqLUADqi+9pNRB9oOy7AA7lfCOCzm
-Dpy8niBPSG/ghTwZJs0xM63WKeiE1FO6qoJIXIJ5bShh6HpLSI8RPILcQihHcsZM
-2s7pupwkEJEgWTl1VN3s5vgNkBxz+l09RhG3SmydLSLiUfAA7tPKQcdnPywwQa3l
-Ura67dQ287kflPDCbHy1XGF/0cN1figNuKC0c4Pdct3H4t7DVP5JneQsONk49NAf
-hV0J8udTKrOCsqh6G4+tiCOvHi+R0QdqXBrKGnnUMsbtnGEnlcmqwTolDzWJKBgy
-aCnQ8mUSATTStFvgEk39M3h4a2P6JLdcm01bb27esJExGylXj1GmJjZoa3tXAI+L
-jcBc2I5MyXJvvF1n7Q/k1fHBOQ8lXZxdNEjw7j86HUQD12hqUFDC5BOET69UwMHL
-qDx/WhR2f7Huq8Ok0By3dLzJNFPw6HIpF6UidTenGhN5UcCP/pxn1zYgq/DCHaH4
-JAXjvOvhu5NFF0uL+RSpNZ8E8reD5BSQ7wCV+q2T8SGfTZrQOWqOb8rtQ0+SIaF4
-A88LqwHvPkfsuoLQN9uIVyMHhTbe0s2D3yOEgI7p5ZpvDyAX96MAa5N4/sOhEKGg
-q3iu
-=RChS
------END PGP PUBLIC KEY BLOCK-----
diff --git a/src/leap/email/smtp/tests/__init__.py b/src/leap/email/smtp/tests/__init__.py
deleted file mode 100644
index 022968e1..00000000
--- a/src/leap/email/smtp/tests/__init__.py
+++ /dev/null
@@ -1,215 +0,0 @@
-import os
-import shutil
-import tempfile
-from leap.email.smtp.smtprelay import GPGWrapper
-from twisted.trial import unittest
-from leap.testing.basetest import BaseLeapTest
-
-
-class OpenPGPTestCase(unittest.TestCase, BaseLeapTest):
-
- def setUp(self):
- # mimic BaseLeapTest.setUpClass behaviour, because this is deprecated
- # in Twisted: http://twistedmatrix.com/trac/ticket/1870
- self.old_path = os.environ['PATH']
- self.old_home = os.environ['HOME']
- self.tempdir = tempfile.mkdtemp(prefix="leap_tests-")
- self.home = self.tempdir
- bin_tdir = os.path.join(
- self.tempdir,
- 'bin')
- os.environ["PATH"] = bin_tdir
- os.environ["HOME"] = self.tempdir
- # setup our own stuff
- self.gnupg_home = self.tempdir + '/gnupg'
- os.mkdir(self.gnupg_home)
- self.email = 'leap@leap.se'
- self._gpg = GPGWrapper(gpghome=self.gnupg_home)
-
- self.assertEqual(self._gpg.import_keys(PUBLIC_KEY).summary(),
- '1 imported', "error importing public key")
- self.assertEqual(self._gpg.import_keys(PRIVATE_KEY).summary(),
- # note that gnupg does not return a successful import
- # for private keys. Bug?
- '0 imported', "error importing private key")
-
- def tearDown(self):
- # mimic BaseLeapTest.tearDownClass behaviour
- os.environ["PATH"] = self.old_path
- os.environ["HOME"] = self.old_home
- # safety check
- assert self.tempdir.startswith('/tmp/leap_tests-')
- shutil.rmtree(self.tempdir)
-
- def test_openpgp_encrypt_decrypt(self):
- text = "simple raw text"
- encrypted = str(self._gpg.encrypt(text, KEY_FINGERPRINT,
- # TODO: handle always trust issue
- always_trust=True))
- self.assertNotEqual(text, encrypted, "failed encrypting text")
- decrypted = str(self._gpg.decrypt(encrypted))
- self.assertEqual(text, decrypted, "failed decrypting text")
-
-
-# Key material for testing
-KEY_FINGERPRINT = "E36E738D69173C13D709E44F2F455E2824D18DDF"
-PUBLIC_KEY = """
------BEGIN PGP PUBLIC KEY BLOCK-----
-Version: GnuPG v1.4.10 (GNU/Linux)
-
-mQINBFC9+dkBEADNRfwV23TWEoGc/x0wWH1P7PlXt8MnC2Z1kKaKKmfnglVrpOiz
-iLWoiU58sfZ0L5vHkzXHXCBf6Eiy/EtUIvdiWAn+yASJ1mk5jZTBKO/WMAHD8wTO
-zpMsFmWyg3xc4DkmFa9KQ5EVU0o/nqPeyQxNMQN7px5pPwrJtJFmPxnxm+aDkPYx
-irDmz/4DeDNqXliazGJKw7efqBdlwTHkl9Akw2gwy178pmsKwHHEMOBOFFvX61AT
-huKqHYmlCGSliwbrJppTG7jc1/ls3itrK+CWTg4txREkSpEVmfcASvw/ZqLbjgfs
-d/INMwXnR9U81O8+7LT6yw/ca4ppcFoJD7/XJbkRiML6+bJ4Dakiy6i727BzV17g
-wI1zqNvm5rAhtALKfACha6YO43aJzairO4II1wxVHvRDHZn2IuKDDephQ3Ii7/vb
-hUOf6XCSmchkAcpKXUOvbxm1yfB1LRa64mMc2RcZxf4mW7KQkulBsdV5QG2276lv
-U2UUy2IutXcGP5nXC+f6sJJGJeEToKJ57yiO/VWJFjKN8SvP+7AYsQSqINUuEf6H
-T5gCPCraGMkTUTPXrREvu7NOohU78q6zZNaL3GW8ai7eSeANSuQ8Vzffx7Wd8Y7i
-Pw9sYj0SMFs1UgjbuL6pO5ueHh+qyumbtAq2K0Bci0kqOcU4E9fNtdiovQARAQAB
-tBxMZWFwIFRlc3QgS2V5IDxsZWFwQGxlYXAuc2U+iQI3BBMBCAAhBQJQvfnZAhsD
-BQsJCAcDBRUKCQgLBRYCAwEAAh4BAheAAAoJEC9FXigk0Y3fT7EQAKH3IuRniOpb
-T/DDIgwwjz3oxB/W0DDMyPXowlhSOuM0rgGfntBpBb3boezEXwL86NPQxNGGruF5
-hkmecSiuPSvOmQlqlS95NGQp6hNG0YaKColh+Q5NTspFXCAkFch9oqUje0LdxfSP
-QfV9UpeEvGyPmk1I9EJV/YDmZ4+Djge1d7qhVZInz4Rx1NrSyF/Tc2EC0VpjQFsU
-Y9Kb2YBBR7ivG6DBc8ty0jJXi7B4WjkFcUEJviQpMF2dCLdonCehYs1PqsN1N7j+
-eFjQd+hqVMJgYuSGKjvuAEfClM6MQw7+FmFwMyLgK/Ew/DttHEDCri77SPSkOGSI
-txCzhTg6798f6mJr7WcXmHX1w1Vcib5FfZ8vTDFVhz/XgAgArdhPo9V6/1dgSSiB
-KPQ/spsco6u5imdOhckERE0lnAYvVT6KE81TKuhF/b23u7x+Wdew6kK0EQhYA7wy
-7LmlaNXc7rMBQJ9Z60CJ4JDtatBWZ0kNrt2VfdDHVdqBTOpl0CraNUjWE5YMDasr
-K2dF5IX8D3uuYtpZnxqg0KzyLg0tzL0tvOL1C2iudgZUISZNPKbS0z0v+afuAAnx
-2pTC3uezbh2Jt8SWTLhll4i0P4Ps5kZ6HQUO56O+/Z1cWovX+mQekYFmERySDR9n
-3k1uAwLilJmRmepGmvYbB8HloV8HqwgguQINBFC9+dkBEAC0I/xn1uborMgDvBtf
-H0sEhwnXBC849/32zic6udB6/3Efk9nzbSpL3FSOuXITZsZgCHPkKarnoQ2ztMcS
-sh1ke1C5gQGms75UVmM/nS+2YI4vY8OX/GC/on2vUyncqdH+bR6xH5hx4NbWpfTs
-iQHmz5C6zzS/kuabGdZyKRaZHt23WQ7JX/4zpjqbC99DjHcP9BSk7tJ8wI4bkMYD
-uFVQdT9O6HwyKGYwUU4sAQRAj7XCTGvVbT0dpgJwH4RmrEtJoHAx4Whg8mJ710E0
-GCmzf2jqkNuOw76ivgk27Kge+Hw00jmJjQhHY0yVbiaoJwcRrPKzaSjEVNgrpgP3
-lXPRGQArgESsIOTeVVHQ8fhK2YtTeCY9rIiO+L0OX2xo9HK7hfHZZWL6rqymXdyS
-fhzh/f6IPyHFWnvj7Brl7DR8heMikygcJqv+ed2yx7iLyCUJ10g12I48+aEj1aLe
-dP7lna32iY8/Z0SHQLNH6PXO9SlPcq2aFUgKqE75A/0FMk7CunzU1OWr2ZtTLNO1
-WT/13LfOhhuEq9jTyTosn0WxBjJKq18lnhzCXlaw6EAtbA7CUwsD3CTPR56aAXFK
-3I7KXOVAqggrvMe5Tpdg5drfYpI8hZovL5aAgb+7Y5ta10TcJdUhS5K3kFAWe/td
-U0cmWUMDP1UMSQ5Jg6JIQVWhSwARAQABiQIfBBgBCAAJBQJQvfnZAhsMAAoJEC9F
-Xigk0Y3fRwsP/i0ElYCyxeLpWJTwo1iCLkMKz2yX1lFVa9nT1BVTPOQwr/IAc5OX
-NdtbJ14fUsKL5pWgW8OmrXtwZm1y4euI1RPWWubG01ouzwnGzv26UcuHeqC5orZj
-cOnKtL40y8VGMm8LoicVkRJH8blPORCnaLjdOtmA3rx/v2EXrJpSa3AhOy0ZSRXk
-ZSrK68AVNwamHRoBSYyo0AtaXnkPX4+tmO8X8BPfj125IljubvwZPIW9VWR9UqCE
-VPfDR1XKegVb6VStIywF7kmrknM1C5qUY28rdZYWgKorw01hBGV4jTW0cqde3N51
-XT1jnIAa+NoXUM9uQoGYMiwrL7vNsLlyyiW5ayDyV92H/rIuiqhFgbJsHTlsm7I8
-oGheR784BagAA1NIKD1qEO9T6Kz9lzlDaeWS5AUKeXrb7ZJLI1TTCIZx5/DxjLqM
-Tt/RFBpVo9geZQrvLUqLAMwdaUvDXC2c6DaCPXTh65oCZj/hqzlJHH+RoTWWzKI+
-BjXxgUWF9EmZUBrg68DSmI+9wuDFsjZ51BcqvJwxyfxtTaWhdoYqH/UQS+D1FP3/
-diZHHlzwVwPICzM9ooNTgbrcDzyxRkIVqsVwBq7EtzcvgYUyX53yG25Giy6YQaQ2
-ZtQ/VymwFL3XdUWV6B/hU4PVAFvO3qlOtdJ6TpE+nEWgcWjCv5g7RjXX
-=MuOY
------END PGP PUBLIC KEY BLOCK-----
-"""
-PRIVATE_KEY = """
------BEGIN PGP PRIVATE KEY BLOCK-----
-Version: GnuPG v1.4.10 (GNU/Linux)
-
-lQcYBFC9+dkBEADNRfwV23TWEoGc/x0wWH1P7PlXt8MnC2Z1kKaKKmfnglVrpOiz
-iLWoiU58sfZ0L5vHkzXHXCBf6Eiy/EtUIvdiWAn+yASJ1mk5jZTBKO/WMAHD8wTO
-zpMsFmWyg3xc4DkmFa9KQ5EVU0o/nqPeyQxNMQN7px5pPwrJtJFmPxnxm+aDkPYx
-irDmz/4DeDNqXliazGJKw7efqBdlwTHkl9Akw2gwy178pmsKwHHEMOBOFFvX61AT
-huKqHYmlCGSliwbrJppTG7jc1/ls3itrK+CWTg4txREkSpEVmfcASvw/ZqLbjgfs
-d/INMwXnR9U81O8+7LT6yw/ca4ppcFoJD7/XJbkRiML6+bJ4Dakiy6i727BzV17g
-wI1zqNvm5rAhtALKfACha6YO43aJzairO4II1wxVHvRDHZn2IuKDDephQ3Ii7/vb
-hUOf6XCSmchkAcpKXUOvbxm1yfB1LRa64mMc2RcZxf4mW7KQkulBsdV5QG2276lv
-U2UUy2IutXcGP5nXC+f6sJJGJeEToKJ57yiO/VWJFjKN8SvP+7AYsQSqINUuEf6H
-T5gCPCraGMkTUTPXrREvu7NOohU78q6zZNaL3GW8ai7eSeANSuQ8Vzffx7Wd8Y7i
-Pw9sYj0SMFs1UgjbuL6pO5ueHh+qyumbtAq2K0Bci0kqOcU4E9fNtdiovQARAQAB
-AA/+JHtlL39G1wsH9R6UEfUQJGXR9MiIiwZoKcnRB2o8+DS+OLjg0JOh8XehtuCs
-E/8oGQKtQqa5bEIstX7IZoYmYFiUQi9LOzIblmp2vxOm+HKkxa4JszWci2/ZmC3t
-KtaA4adl9XVnshoQ7pijuCMUKB3naBEOAxd8s9d/JeReGIYkJErdrnVfNk5N71Ds
-FmH5Ll3XtEDvgBUQP3nkA6QFjpsaB94FHjL3gDwum/cxzj6pCglcvHOzEhfY0Ddb
-J967FozQTaf2JW3O+w3LOqtcKWpq87B7+O61tVidQPSSuzPjCtFF0D2LC9R/Hpky
-KTMQ6CaKja4MPhjwywd4QPcHGYSqjMpflvJqi+kYIt8psUK/YswWjnr3r4fbuqVY
-VhtiHvnBHQjz135lUqWvEz4hM3Xpnxydx7aRlv5NlevK8+YIO5oFbWbGNTWsPZI5
-jpoFBpSsnR1Q5tnvtNHauvoWV+XN2qAOBTG+/nEbDYH6Ak3aaE9jrpTdYh0CotYF
-q7csANsDy3JvkAzeU6WnYpsHHaAjqOGyiZGsLej1UcXPFMosE/aUo4WQhiS8Zx2c
-zOVKOi/X5vQ2GdNT9Qolz8AriwzsvFR+bxPzyd8V6ALwDsoXvwEYinYBKK8j0OPv
-OOihSR6HVsuP9NUZNU9ewiGzte/+/r6pNXHvR7wTQ8EWLcEIAN6Zyrb0bHZTIlxt
-VWur/Ht2mIZrBaO50qmM5RD3T5oXzWXi/pjLrIpBMfeZR9DWfwQwjYzwqi7pxtYx
-nJvbMuY505rfnMoYxb4J+cpRXV8MS7Dr1vjjLVUC9KiwSbM3gg6emfd2yuA93ihv
-Pe3mffzLIiQa4mRE3wtGcioC43nWuV2K2e1KjxeFg07JhrezA/1Cak505ab/tmvP
-4YmjR5c44+yL/YcQ3HdFgs4mV+nVbptRXvRcPpolJsgxPccGNdvHhsoR4gwXMS3F
-RRPD2z6x8xeN73Q4KH3bm01swQdwFBZbWVfmUGLxvN7leCdfs9+iFJyqHiCIB6Iv
-mQfp8F0IAOwSo8JhWN+V1dwML4EkIrM8wUb4yecNLkyR6TpPH/qXx4PxVMC+vy6x
-sCtjeHIwKE+9vqnlhd5zOYh7qYXEJtYwdeDDmDbL8oks1LFfd+FyAuZXY33DLwn0
-cRYsr2OEZmaajqUB3NVmj3H4uJBN9+paFHyFSXrH68K1Fk2o3n+RSf2EiX+eICwI
-L6rqoF5sSVUghBWdNegV7qfy4anwTQwrIMGjgU5S6PKW0Dr/3iO5z3qQpGPAj5OW
-ATqPWkDICLbObPxD5cJlyyNE2wCA9VVc6/1d6w4EVwSq9h3/WTpATEreXXxTGptd
-LNiTA1nmakBYNO2Iyo3djhaqBdWjk+EIAKtVEnJH9FAVwWOvaj1RoZMA5DnDMo7e
-SnhrCXl8AL7Z1WInEaybasTJXn1uQ8xY52Ua4b8cbuEKRKzw/70NesFRoMLYoHTO
-dyeszvhoDHberpGRTciVmpMu7Hyi33rM31K9epA4ib6QbbCHnxkWOZB+Bhgj1hJ8
-xb4RBYWiWpAYcg0+DAC3w9gfxQhtUlZPIbmbrBmrVkO2GVGUj8kH6k4UV6kUHEGY
-HQWQR0HcbKcXW81ZXCCD0l7ROuEWQtTe5Jw7dJ4/QFuqZnPutXVRNOZqpl6eRShw
-7X2/a29VXBpmHA95a88rSQsL+qm7Fb3prqRmuMCtrUZgFz7HLSTuUMR867QcTGVh
-cCBUZXN0IEtleSA8bGVhcEBsZWFwLnNlPokCNwQTAQgAIQUCUL352QIbAwULCQgH
-AwUVCgkICwUWAgMBAAIeAQIXgAAKCRAvRV4oJNGN30+xEACh9yLkZ4jqW0/wwyIM
-MI896MQf1tAwzMj16MJYUjrjNK4Bn57QaQW926HsxF8C/OjT0MTRhq7heYZJnnEo
-rj0rzpkJapUveTRkKeoTRtGGigqJYfkOTU7KRVwgJBXIfaKlI3tC3cX0j0H1fVKX
-hLxsj5pNSPRCVf2A5mePg44HtXe6oVWSJ8+EcdTa0shf03NhAtFaY0BbFGPSm9mA
-QUe4rxugwXPLctIyV4uweFo5BXFBCb4kKTBdnQi3aJwnoWLNT6rDdTe4/nhY0Hfo
-alTCYGLkhio77gBHwpTOjEMO/hZhcDMi4CvxMPw7bRxAwq4u+0j0pDhkiLcQs4U4
-Ou/fH+pia+1nF5h19cNVXIm+RX2fL0wxVYc/14AIAK3YT6PVev9XYEkogSj0P7Kb
-HKOruYpnToXJBERNJZwGL1U+ihPNUyroRf29t7u8flnXsOpCtBEIWAO8Muy5pWjV
-3O6zAUCfWetAieCQ7WrQVmdJDa7dlX3Qx1XagUzqZdAq2jVI1hOWDA2rKytnReSF
-/A97rmLaWZ8aoNCs8i4NLcy9Lbzi9QtornYGVCEmTTym0tM9L/mn7gAJ8dqUwt7n
-s24dibfElky4ZZeItD+D7OZGeh0FDuejvv2dXFqL1/pkHpGBZhEckg0fZ95NbgMC
-4pSZkZnqRpr2GwfB5aFfB6sIIJ0HGARQvfnZARAAtCP8Z9bm6KzIA7wbXx9LBIcJ
-1wQvOPf99s4nOrnQev9xH5PZ820qS9xUjrlyE2bGYAhz5Cmq56ENs7THErIdZHtQ
-uYEBprO+VFZjP50vtmCOL2PDl/xgv6J9r1Mp3KnR/m0esR+YceDW1qX07IkB5s+Q
-us80v5LmmxnWcikWmR7dt1kOyV/+M6Y6mwvfQ4x3D/QUpO7SfMCOG5DGA7hVUHU/
-Tuh8MihmMFFOLAEEQI+1wkxr1W09HaYCcB+EZqxLSaBwMeFoYPJie9dBNBgps39o
-6pDbjsO+or4JNuyoHvh8NNI5iY0IR2NMlW4mqCcHEazys2koxFTYK6YD95Vz0RkA
-K4BErCDk3lVR0PH4StmLU3gmPayIjvi9Dl9saPRyu4Xx2WVi+q6spl3ckn4c4f3+
-iD8hxVp74+wa5ew0fIXjIpMoHCar/nndsse4i8glCddINdiOPPmhI9Wi3nT+5Z2t
-9omPP2dEh0CzR+j1zvUpT3KtmhVICqhO+QP9BTJOwrp81NTlq9mbUyzTtVk/9dy3
-zoYbhKvY08k6LJ9FsQYySqtfJZ4cwl5WsOhALWwOwlMLA9wkz0eemgFxStyOylzl
-QKoIK7zHuU6XYOXa32KSPIWaLy+WgIG/u2ObWtdE3CXVIUuSt5BQFnv7XVNHJllD
-Az9VDEkOSYOiSEFVoUsAEQEAAQAP/1AagnZQZyzHDEgw4QELAspYHCWLXE5aZInX
-wTUJhK31IgIXNn9bJ0hFiSpQR2xeMs9oYtRuPOu0P8oOFMn4/z374fkjZy8QVY3e
-PlL+3EUeqYtkMwlGNmVw5a/NbNuNfm5Darb7pEfbYd1gPcni4MAYw7R2SG/57GbC
-9gucvspHIfOSfBNLBthDzmK8xEKe1yD2eimfc2T7IRYb6hmkYfeds5GsqvGI6mwI
-85h4uUHWRc5JOlhVM6yX8hSWx0L60Z3DZLChmc8maWnFXd7C8eQ6P1azJJbW71Ih
-7CoK0XW4LE82vlQurSRFgTwfl7wFYszW2bOzCuhHDDtYnwH86Nsu0DC78ZVRnvxn
-E8Ke/AJgrdhIOo4UAyR+aZD2+2mKd7/waOUTUrUtTzc7i8N3YXGi/EIaNReBXaq+
-ZNOp24BlFzRp+FCF/pptDW9HjPdiV09x0DgICmeZS4Gq/4vFFIahWctg52NGebT0
-Idxngjj+xDtLaZlLQoOz0n5ByjO/Wi0ANmMv1sMKCHhGvdaSws2/PbMR2r4caj8m
-KXpIgdinM/wUzHJ5pZyF2U/qejsRj8Kw8KH/tfX4JCLhiaP/mgeTuWGDHeZQERAT
-xPmRFHaLP9/ZhvGNh6okIYtrKjWTLGoXvKLHcrKNisBLSq+P2WeFrlme1vjvJMo/
-jPwLT5o9CADQmcbKZ+QQ1ZM9v99iDZol7SAMZX43JC019sx6GK0u6xouJBcLfeB4
-OXacTgmSYdTa9RM9fbfVpti01tJ84LV2SyL/VJq/enJF4XQPSynT/tFTn1PAor6o
-tEAAd8fjKdJ6LnD5wb92SPHfQfXqI84rFEO8rUNIE/1ErT6DYifDzVCbfD2KZdoF
-cOSp7TpD77sY1bs74ocBX5ejKtd+aH99D78bJSMM4pSDZsIEwnomkBHTziubPwJb
-OwnATy0LmSMAWOw5rKbsh5nfwCiUTM20xp0t5JeXd+wPVWbpWqI2EnkCEN+RJr9i
-7dp/ymDQ+Yt5wrsN3NwoyiexPOG91WQVCADdErHsnglVZZq9Z8Wx7KwecGCUurJ2
-H6lKudv5YOxPnAzqZS5HbpZd/nRTMZh2rdXCr5m2YOuewyYjvM757AkmUpM09zJX
-MQ1S67/UX2y8/74TcRF97Ncx9HeELs92innBRXoFitnNguvcO6Esx4BTe1OdU6qR
-ER3zAmVf22Le9ciXbu24DN4mleOH+OmBx7X2PqJSYW9GAMTsRB081R6EWKH7romQ
-waxFrZ4DJzZ9ltyosEJn5F32StyLrFxpcrdLUoEaclZCv2qka7sZvi0EvovDVEBU
-e10jOx9AOwf8Gj2ufhquQ6qgVYCzbP+YrodtkFrXRS3IsljIchj1M2ffB/0bfoUs
-rtER9pLvYzCjBPg8IfGLw0o754Qbhh/ReplCRTusP/fQMybvCvfxreS3oyEriu/G
-GufRomjewZ8EMHDIgUsLcYo2UHZsfF7tcazgxMGmMvazp4r8vpgrvW/8fIN/6Adu
-tF+WjWDTvJLFJCe6O+BFJOWrssNrrra1zGtLC1s8s+Wfpe+bGPL5zpHeebGTwH1U
-22eqgJArlEKxrfarz7W5+uHZJHSjF/K9ZvunLGD0n9GOPMpji3UO3zeM8IYoWn7E
-/EWK1XbjnssNemeeTZ+sDh+qrD7BOi+vCX1IyBxbfqnQfJZvmcPWpruy1UsO+aIC
-0GY8Jr3OL69dDQ21jueJAh8EGAEIAAkFAlC9+dkCGwwACgkQL0VeKCTRjd9HCw/+
-LQSVgLLF4ulYlPCjWIIuQwrPbJfWUVVr2dPUFVM85DCv8gBzk5c121snXh9Swovm
-laBbw6ate3BmbXLh64jVE9Za5sbTWi7PCcbO/bpRy4d6oLmitmNw6cq0vjTLxUYy
-bwuiJxWREkfxuU85EKdouN062YDevH+/YResmlJrcCE7LRlJFeRlKsrrwBU3BqYd
-GgFJjKjQC1peeQ9fj62Y7xfwE9+PXbkiWO5u/Bk8hb1VZH1SoIRU98NHVcp6BVvp
-VK0jLAXuSauSczULmpRjbyt1lhaAqivDTWEEZXiNNbRyp17c3nVdPWOcgBr42hdQ
-z25CgZgyLCsvu82wuXLKJblrIPJX3Yf+si6KqEWBsmwdOWybsjygaF5HvzgFqAAD
-U0goPWoQ71PorP2XOUNp5ZLkBQp5etvtkksjVNMIhnHn8PGMuoxO39EUGlWj2B5l
-Cu8tSosAzB1pS8NcLZzoNoI9dOHrmgJmP+GrOUkcf5GhNZbMoj4GNfGBRYX0SZlQ
-GuDrwNKYj73C4MWyNnnUFyq8nDHJ/G1NpaF2hiof9RBL4PUU/f92JkceXPBXA8gL
-Mz2ig1OButwPPLFGQhWqxXAGrsS3Ny+BhTJfnfIbbkaLLphBpDZm1D9XKbAUvdd1
-RZXoH+FTg9UAW87eqU610npOkT6cRaBxaMK/mDtGNdc=
-=JTFu
------END PGP PRIVATE KEY BLOCK-----
-"""
diff --git a/src/leap/email/smtp/tests/mail.txt b/src/leap/email/smtp/tests/mail.txt
deleted file mode 100644
index 95420470..00000000
--- a/src/leap/email/smtp/tests/mail.txt
+++ /dev/null
@@ -1,10 +0,0 @@
-HELO drebs@riseup.net
-MAIL FROM: drebs@riseup.net
-RCPT TO: drebs@riseup.net
-RCPT TO: drebs@leap.se
-DATA
-Subject: leap test
-
-Hello world!
-.
-QUIT
diff --git a/src/leap/email/smtp/tests/test_smtprelay.py b/src/leap/email/smtp/tests/test_smtprelay.py
deleted file mode 100644
index 1c549eb3..00000000
--- a/src/leap/email/smtp/tests/test_smtprelay.py
+++ /dev/null
@@ -1,75 +0,0 @@
-from datetime import datetime
-import re
-from leap.email.smtp.smtprelay import (
- SMTPFactory,
- #SMTPDelivery, # an object
- EncryptedMessage,
-)
-from leap.email.smtp import tests
-from twisted.test import proto_helpers
-from twisted.mail.smtp import User
-
-
-# some regexps
-IP_REGEX = "(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}" + \
- "([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])"
-HOSTNAME_REGEX = "(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*" + \
- "([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])"
-IP_OR_HOST_REGEX = '(' + IP_REGEX + '|' + HOSTNAME_REGEX + ')'
-
-
-class TestSmtpRelay(tests.OpenPGPTestCase):
-
- EMAIL_DATA = ['HELO relay.leap.se',
- 'MAIL FROM: <user@leap.se>',
- 'RCPT TO: <leap@leap.se>',
- 'DATA',
- 'From: User <user@leap.se>',
- 'To: Leap <leap@leap.se>',
- 'Date: ' + datetime.now().strftime('%c'),
- 'Subject: test message',
- '',
- 'This is a secret message.',
- 'Yours,',
- 'A.',
- '',
- '.',
- 'QUIT']
-
- def assertMatch(self, string, pattern, msg=None):
- if not re.match(pattern, string):
- msg = self._formatMessage(msg, '"%s" does not match pattern "%s".'
- % (string, pattern))
- raise self.failureException(msg)
-
- def test_relay_accepts_valid_email(self):
- """
- Test if SMTP server responds correctly for valid interaction.
- """
-
- SMTP_ANSWERS = ['220 ' + IP_OR_HOST_REGEX +
- ' NO UCE NO UBE NO RELAY PROBES',
- '250 ' + IP_OR_HOST_REGEX + ' Hello ' +
- IP_OR_HOST_REGEX + ', nice to meet you',
- '250 Sender address accepted',
- '250 Recipient address accepted',
- '354 Continue']
- proto = SMTPFactory(self._gpg).buildProtocol(('127.0.0.1', 0))
- transport = proto_helpers.StringTransport()
- proto.makeConnection(transport)
- for i, line in enumerate(self.EMAIL_DATA):
- proto.lineReceived(line + '\r\n')
- self.assertMatch(transport.value(),
- '\r\n'.join(SMTP_ANSWERS[0:i + 1]))
- proto.setTimeout(None)
-
- def test_message_encrypt(self):
- proto = SMTPFactory(self._gpg).buildProtocol(('127.0.0.1', 0))
- user = User('leap@leap.se', 'relay.leap.se', proto, 'leap@leap.se')
- m = EncryptedMessage(user, self._gpg)
- for line in self.EMAIL_DATA[4:12]:
- m.lineReceived(line)
- m.parseMessage()
- m.encrypt()
- decrypted = str(self._gpg.decrypt(m.cyphertext))
- self.assertEqual('\n'.join(self.EMAIL_DATA[9:12]), decrypted)
diff --git a/src/leap/gui/__init__.py b/src/leap/gui/__init__.py
deleted file mode 100644
index 804bfbc1..00000000
--- a/src/leap/gui/__init__.py
+++ /dev/null
@@ -1,11 +0,0 @@
-try:
- import sip
- sip.setapi('QString', 2)
- sip.setapi('QVariant', 2)
-except ValueError:
- pass
-
-import firstrun
-import firstrun.wizard
-
-__all__ = ['firstrun', 'firstrun.wizard']
diff --git a/src/leap/gui/constants.py b/src/leap/gui/constants.py
deleted file mode 100644
index 277f3540..00000000
--- a/src/leap/gui/constants.py
+++ /dev/null
@@ -1,13 +0,0 @@
-import time
-
-APP_LOGO = ':/images/leap-color-small.png'
-
-# bare is the username portion of a JID
-# full includes the "at" and some extra chars
-# that can be allowed for fqdn
-
-BARE_USERNAME_REGEX = r"^[A-Za-z\d_]+$"
-FULL_USERNAME_REGEX = r"^[A-Za-z\d_@.-]+$"
-
-GUI_PAUSE_FOR_USER_SECONDS = 1
-pause_for_user = lambda: time.sleep(GUI_PAUSE_FOR_USER_SECONDS)
diff --git a/src/leap/gui/firstrun/__init__.py b/src/leap/gui/firstrun/__init__.py
deleted file mode 100644
index 2a523d6a..00000000
--- a/src/leap/gui/firstrun/__init__.py
+++ /dev/null
@@ -1,28 +0,0 @@
-try:
- import sip
- sip.setapi('QString', 2)
- sip.setapi('QVariant', 2)
-except ValueError:
- pass
-
-import intro
-import connect
-import last
-import login
-import mixins
-import providerinfo
-import providerselect
-import providersetup
-import register
-
-__all__ = [
- 'intro',
- 'connect',
- 'last',
- 'login',
- 'mixins',
- 'providerinfo',
- 'providerselect',
- 'providersetup',
- 'register',
- ] # ,'wizard']
diff --git a/src/leap/gui/firstrun/connect.py b/src/leap/gui/firstrun/connect.py
deleted file mode 100644
index ad7bb13a..00000000
--- a/src/leap/gui/firstrun/connect.py
+++ /dev/null
@@ -1,214 +0,0 @@
-"""
-Provider Setup Validation Page,
-used in First Run Wizard
-"""
-import logging
-
-from PyQt4 import QtGui
-
-#import requests
-
-from leap.gui.progress import ValidationPage
-from leap.util.web import get_https_domain_and_port
-
-from leap.base import auth
-from leap.gui.constants import APP_LOGO
-
-logger = logging.getLogger(__name__)
-
-
-class ConnectionPage(ValidationPage):
-
- def __init__(self, parent=None):
- super(ConnectionPage, self).__init__(parent)
- self.current_page = "connect"
-
- title = self.tr("Connecting...")
- subtitle = self.tr("Setting up a encrypted "
- "connection with the provider")
-
- self.setTitle(title)
- self.setSubTitle(subtitle)
-
- self.setPixmap(
- QtGui.QWizard.LogoPixmap,
- QtGui.QPixmap(APP_LOGO))
-
- def _do_checks(self, update_signal=None):
- """
- executes actual checks in a separate thread
-
- we initialize the SRP protocol registration
- and try to register the user.
- """
- wizard = self.wizard()
- full_domain = self.field('provider_domain')
- domain, port = get_https_domain_and_port(full_domain)
-
- pconfig = wizard.eipconfigchecker(domain=domain)
- # this should be persisted...
- pconfig.defaultprovider.load()
- pconfig.set_api_domain()
-
- pCertChecker = wizard.providercertchecker(
- domain=domain)
- pCertChecker.set_api_domain(pconfig.apidomain)
-
- ###########################################
- # Set Credentials.
- # username and password are in different fields
- # if they were stored in log_in or sign_up pages.
- from_login = wizard.from_login
-
- unamek_base = 'userName'
- passwk_base = 'userPassword'
- unamek = 'login_%s' % unamek_base if from_login else unamek_base
- passwk = 'login_%s' % passwk_base if from_login else passwk_base
-
- username = self.field(unamek)
- password = self.field(passwk)
- credentials = username, password
-
- yield(("head_sentinel", 0), lambda: None)
-
- ##################################################
- # 1) fetching eip service config
- ##################################################
- def fetcheipconf():
- try:
- pconfig.fetch_eip_service_config()
-
- # XXX get specific exception
- except Exception as exc:
- return self.fail(exc.message)
-
- yield((self.tr("Getting EIP configuration files"), 40),
- fetcheipconf)
-
- ##################################################
- # 2) getting client certificate
- ##################################################
-
- def fetcheipcert():
- try:
- downloaded = pCertChecker.download_new_client_cert(
- credentials=credentials)
- if not downloaded:
- logger.error('Could not download client cert')
- return False
-
- except auth.SRPAuthenticationError as exc:
- return self.fail(self.tr(
- "Authentication error: %s" % exc.message))
-
- except Exception as exc:
- return self.fail(exc.message)
- else:
- return True
-
- yield((self.tr("Getting EIP certificate"), 80),
- fetcheipcert)
-
- ################
- # end !
- ################
- self.set_done()
- yield(("end_sentinel", 100), lambda: None)
-
- def on_checks_validation_ready(self):
- """
- called after _do_checks has finished
- (connected to checker thread finished signal)
- """
- # here we go! :)
- if self.is_done():
- nextbutton = self.wizard().button(QtGui.QWizard.NextButton)
- nextbutton.setFocus()
-
- full_domain = self.field('provider_domain')
- domain, port = get_https_domain_and_port(full_domain)
- _domain = u"%s:%s" % (
- domain, port) if port != 443 else unicode(domain)
- self.run_eip_checks_for_provider_and_connect(_domain)
-
- def run_eip_checks_for_provider_and_connect(self, domain):
- wizard = self.wizard()
- conductor = wizard.conductor
- start_eip_signal = getattr(
- wizard,
- 'start_eipconnection_signal', None)
-
- if conductor:
- conductor.set_provider_domain(domain)
- # we could run some of the checks to be
- # sure everything is in order, but
- # I see no point in doing it, we assume
- # we've gone thru all checks during the wizard.
- #conductor.run_checks()
- #self.conductor = conductor
- #errors = self.eip_error_check()
- #if not errors and start_eip_signal:
- if start_eip_signal:
- start_eip_signal.emit()
-
- else:
- logger.warning(
- "No conductor found. This means that "
- "probably the wizard has been launched "
- "in an stand-alone way.")
-
- self.set_done()
-
- #def eip_error_check(self):
- #"""
- #a version of the main app error checker,
- #but integrated within the connecting page of the wizard.
- #consumes the conductor error queue.
- #pops errors, and adds those to the wizard page
- #"""
- # TODO handle errors.
- # We should redirect them to the log viewer
- # with a brief message.
- # XXX move to LAST PAGE instead.
- #logger.debug('eip error check from connecting page')
- #errq = self.conductor.error_queue
-
- #def _do_validation(self):
- #"""
- #called after _do_checks has finished
- #(connected to checker thread finished signal)
- #"""
- #from_login = self.wizard().from_login
- #prevpage = "login" if from_login else "signup"
-
- #wizard = self.wizard()
- #if self.errors:
- #logger.debug('going back with errors')
- #logger.error(self.errors)
- #name, first_error = self.pop_first_error()
- #wizard.set_validation_error(
- #prevpage,
- #first_error)
- #self.go_back()
-
- def nextId(self):
- wizard = self.wizard()
- return wizard.get_page_index('lastpage')
-
- def initializePage(self):
- super(ConnectionPage, self).initializePage()
- self.set_undone()
- cancelbutton = self.wizard().button(QtGui.QWizard.CancelButton)
- cancelbutton.hide()
- self.completeChanged.emit()
-
- wizard = self.wizard()
- eip_statuschange_signal = wizard.eip_statuschange_signal
- if eip_statuschange_signal:
- eip_statuschange_signal.connect(
- lambda status: self.send_status(
- status))
-
- def send_status(self, status):
- wizard = self.wizard()
- wizard.openvpn_status.append(status)
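
ConnectionPage._do_checks above does not run the checks itself: it yields ((label, progress), callable) pairs for the progress machinery in leap.gui.progress, which is not part of this file. A hypothetical driver loop, only to make that protocol explicit; the real consumer may differ:

    def drive_checks(check_generator, update=lambda label, progress: None):
        """Run a _do_checks-style generator to completion.

        Hypothetical driver: the real consumer lives in leap.gui.progress.
        Assumes self.fail() returns False, so a failed check returns False
        while a successful one returns True or None.
        """
        for (label, progress), step in check_generator:
            update(label, progress)   # e.g. refresh the wizard progress bar
            if step() is False:       # stop at the first failed check
                return False
        return True
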
diff --git a/src/leap/gui/firstrun/constants.py b/src/leap/gui/firstrun/constants.py
deleted file mode 100644
index e69de29b..00000000
--- a/src/leap/gui/firstrun/constants.py
+++ /dev/null
diff --git a/src/leap/gui/firstrun/intro.py b/src/leap/gui/firstrun/intro.py
deleted file mode 100644
index b519362f..00000000
--- a/src/leap/gui/firstrun/intro.py
+++ /dev/null
@@ -1,68 +0,0 @@
-"""
-Intro page used in first run wizard
-"""
-
-from PyQt4 import QtGui
-
-from leap.gui.constants import APP_LOGO
-
-
-class IntroPage(QtGui.QWizardPage):
- def __init__(self, parent=None):
- super(IntroPage, self).__init__(parent)
-
- self.setTitle(self.tr("First run wizard"))
-
- #self.setPixmap(
- #QtGui.QWizard.WatermarkPixmap,
- #QtGui.QPixmap(':/images/watermark1.png'))
-
- self.setPixmap(
- QtGui.QWizard.LogoPixmap,
- QtGui.QPixmap(APP_LOGO))
-
- label = QtGui.QLabel(self.tr(
- "Now we will guide you through "
- "some configuration that is needed before you "
- "can connect for the first time.<br><br>"
- "If you ever need to modify these options again, "
- "you can find the wizard in the '<i>Settings</i>' menu from the "
- "main window.<br><br>"
- "Do you want to <b>sign up</b> for a new account, or <b>log "
- "in</b> with an already existing username?<br>"))
- label.setWordWrap(True)
-
- radiobuttonGroup = QtGui.QGroupBox()
-
- self.sign_up = QtGui.QRadioButton(
- self.tr("Sign up for a new account"))
- self.sign_up.setChecked(True)
- self.log_in = QtGui.QRadioButton(
- self.tr("Log In with my credentials"))
-
- radiobLayout = QtGui.QVBoxLayout()
- radiobLayout.addWidget(self.sign_up)
- radiobLayout.addWidget(self.log_in)
- radiobuttonGroup.setLayout(radiobLayout)
-
- layout = QtGui.QVBoxLayout()
- layout.addWidget(label)
- layout.addWidget(radiobuttonGroup)
- self.setLayout(layout)
-
- #self.registerField('is_signup', self.sign_up)
-
- def validatePage(self):
- return True
-
- def nextId(self):
- """
- returns next id
- in a non-linear wizard
- """
- if self.sign_up.isChecked():
- next_ = 'providerselection'
- if self.log_in.isChecked():
- next_ = 'login'
- wizard = self.wizard()
- return wizard.get_page_index(next_)
diff --git a/src/leap/gui/firstrun/last.py b/src/leap/gui/firstrun/last.py
deleted file mode 100644
index f3e467db..00000000
--- a/src/leap/gui/firstrun/last.py
+++ /dev/null
@@ -1,119 +0,0 @@
-"""
-Last Page, used in First Run Wizard
-"""
-import logging
-
-from PyQt4 import QtGui
-
-from leap.util.coroutines import coroutine
-from leap.gui.constants import APP_LOGO
-
-logger = logging.getLogger(__name__)
-
-
-class LastPage(QtGui.QWizardPage):
- def __init__(self, parent=None):
- super(LastPage, self).__init__(parent)
-
- self.setTitle(self.tr(
- "Connecting to Encrypted Internet Proxy service..."))
-
- self.setPixmap(
- QtGui.QWizard.LogoPixmap,
- QtGui.QPixmap(APP_LOGO))
-
- #self.setPixmap(
- #QtGui.QWizard.WatermarkPixmap,
- #QtGui.QPixmap(':/images/watermark2.png'))
-
- self.label = QtGui.QLabel()
- self.label.setWordWrap(True)
-
- self.wizard_done = False
-
- # XXX REFACTOR to a Validating Page...
- self.status_line_1 = QtGui.QLabel()
- self.status_line_2 = QtGui.QLabel()
- self.status_line_3 = QtGui.QLabel()
- self.status_line_4 = QtGui.QLabel()
- self.status_line_5 = QtGui.QLabel()
-
- layout = QtGui.QVBoxLayout()
- layout.addWidget(self.label)
-
- # make loop
- layout.addWidget(self.status_line_1)
- layout.addWidget(self.status_line_2)
- layout.addWidget(self.status_line_3)
- layout.addWidget(self.status_line_4)
- layout.addWidget(self.status_line_5)
-
- self.setLayout(layout)
-
- def isComplete(self):
- return self.wizard_done
-
- def set_status_line(self, line, status):
- statusline = getattr(self, 'status_line_%s' % line)
- if statusline:
- statusline.setText(status)
-
- def set_finished_status(self):
- self.setTitle(self.tr('You are now using an encrypted connection!'))
- finishText = self.wizard().buttonText(
- QtGui.QWizard.FinishButton)
- finishText = finishText.replace('&', '')
- self.label.setText(self.tr(
- "Click '<i>%s</i>' to end the wizard and "
- "save your settings." % finishText))
- self.wizard_done = True
- self.completeChanged.emit()
-
- @coroutine
- def eip_status_handler(self):
- # XXX this can be changed to use
- # signals. See progress.py
- logger.debug('logging status in last page')
- self.validation_done = False
- status_count = 1
- try:
- while True:
- status = (yield)
- status_count += 1
- # XXX add to line...
- logger.debug('status --> %s', status)
- self.set_status_line(status_count, status)
- if status == "connected":
- self.set_finished_status()
- self.completeChanged.emit()
- break
- self.completeChanged.emit()
- except GeneratorExit:
- pass
- except StopIteration:
- pass
-
- def initializePage(self):
- super(LastPage, self).initializePage()
- wizard = self.wizard()
- wizard.button(QtGui.QWizard.FinishButton).setDisabled(True)
-
- handler = self.eip_status_handler()
-
- # get statuses done in prev page
- for st in wizard.openvpn_status:
- self.send_status(handler.send, st)
-
- # bind signal for events yet to come
- eip_statuschange_signal = wizard.eip_statuschange_signal
- if eip_statuschange_signal:
- eip_statuschange_signal.connect(
- lambda status: self.send_status(
- handler.send, status))
- self.completeChanged.emit()
-
- def send_status(self, cb, status):
- try:
- cb(status)
- except StopIteration:
- pass
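
LastPage.eip_status_handler above depends on the @coroutine decorator from leap.util.coroutines, which is not part of this hunk. The usual shape of that priming decorator is sketched below from the generic pattern, not copied from the LEAP module.

def coroutine(func):
    """Prime a generator-based coroutine: advance it to the first
    (yield) so callers can .send() into it right away."""
    def start(*args, **kwargs):
        gen = func(*args, **kwargs)
        next(gen)          # run up to the first yield
        return gen
    return start


@coroutine
def status_printer():
    while True:
        status = (yield)
        print("got status: %s" % status)


handler = status_printer()
handler.send("connecting")   # works without an explicit next() first
handler.send("connected")
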
diff --git a/src/leap/gui/firstrun/login.py b/src/leap/gui/firstrun/login.py
deleted file mode 100644
index 3707d3ff..00000000
--- a/src/leap/gui/firstrun/login.py
+++ /dev/null
@@ -1,332 +0,0 @@
-"""
-LogIn Page, used in First Run Wizard
-"""
-from PyQt4 import QtCore
-from PyQt4 import QtGui
-
-import requests
-
-from leap.base import auth
-from leap.gui.firstrun.mixins import UserFormMixIn
-from leap.gui.progress import InlineValidationPage
-from leap.gui import styles
-
-from leap.gui.constants import APP_LOGO, FULL_USERNAME_REGEX
-
-
-class LogInPage(InlineValidationPage, UserFormMixIn): # InlineValidationPage
-
- def __init__(self, parent=None):
-
- super(LogInPage, self).__init__(parent)
- self.current_page = "login"
-
- self.setTitle(self.tr("Log In"))
- self.setSubTitle(self.tr("Log in with your credentials"))
- self.current_page = "login"
-
- self.setPixmap(
- QtGui.QWizard.LogoPixmap,
- QtGui.QPixmap(APP_LOGO))
-
- self.setupSteps()
- self.setupUI()
-
- self.do_confirm_next = False
-
- def setupUI(self):
- userNameLabel = QtGui.QLabel(self.tr("User &name:"))
- userNameLineEdit = QtGui.QLineEdit()
- userNameLineEdit.cursorPositionChanged.connect(
- self.reset_validation_status)
- userNameLabel.setBuddy(userNameLineEdit)
-
- # let's add regex validator
- usernameRe = QtCore.QRegExp(FULL_USERNAME_REGEX)
- userNameLineEdit.setValidator(
- QtGui.QRegExpValidator(usernameRe, self))
-
- #userNameLineEdit.setPlaceholderText(
- #'username@provider.example.org')
- self.userNameLineEdit = userNameLineEdit
-
- userPasswordLabel = QtGui.QLabel(self.tr("&Password:"))
- self.userPasswordLineEdit = QtGui.QLineEdit()
- self.userPasswordLineEdit.setEchoMode(
- QtGui.QLineEdit.Password)
- userPasswordLabel.setBuddy(self.userPasswordLineEdit)
-
- self.registerField('login_userName*', self.userNameLineEdit)
- self.registerField('login_userPassword*', self.userPasswordLineEdit)
-
- layout = QtGui.QGridLayout()
- layout.setColumnMinimumWidth(0, 20)
-
- validationMsg = QtGui.QLabel("")
- validationMsg.setStyleSheet(styles.ErrorLabelStyleSheet)
- self.validationMsg = validationMsg
-
- layout.addWidget(validationMsg, 0, 3)
- layout.addWidget(userNameLabel, 1, 0)
- layout.addWidget(self.userNameLineEdit, 1, 3)
- layout.addWidget(userPasswordLabel, 2, 0)
- layout.addWidget(self.userPasswordLineEdit, 2, 3)
-
- # add validation frame
- self.setupValidationFrame()
- layout.addWidget(self.valFrame, 4, 2, 4, 2)
- self.valFrame.hide()
-
- self.nextText(self.tr("Log in"))
- self.setLayout(layout)
-
- #self.registerField('is_login_wizard')
-
- # actual checks
-
- def _do_checks(self):
-
- full_username = self.userNameLineEdit.text()
- ###########################
- # 0) check user@domain form
- ###########################
-
- def checkusername():
- if full_username.count('@') != 1:
- return self.fail(
- self.tr(
- "Username must be in the username@provider form."))
- else:
- return True
-
- yield(("head_sentinel", 0), checkusername)
-
- username, domain = full_username.split('@')
- password = self.userPasswordLineEdit.text()
-
-        # We try a call to an authenticated
-        # page here as a means to catch
-        # SRP authentication errors early on.
- wizard = self.wizard()
- eipconfigchecker = wizard.eipconfigchecker(domain=domain)
-
- ########################
- # 1) try name resolution
- ########################
- # show the frame before going on...
- QtCore.QMetaObject.invokeMethod(
- self, "showStepsFrame")
-
- # Able to contact domain?
- # can get definition?
- # two-by-one
- def resolvedomain():
- try:
- eipconfigchecker.fetch_definition(domain=domain)
-
- # we're using requests here for all
- # the possible error cases that it catches.
- except requests.exceptions.ConnectionError as exc:
- return self.fail(exc.message[1])
- except requests.exceptions.HTTPError as exc:
- return self.fail(exc.message)
- except Exception as exc:
- # XXX get catchall error msg
- return self.fail(
- exc.message)
- else:
- return True
-
- yield((self.tr("Resolving domain name"), 20), resolvedomain)
-
- wizard.set_providerconfig(
- eipconfigchecker.defaultprovider.config)
-
- ########################
- # 2) do authentication
- ########################
- credentials = username, password
- pCertChecker = wizard.providercertchecker(
- domain=domain)
-
- def validate_credentials():
- #################
- # FIXME #BUG #638
- verify = False
-
- try:
- pCertChecker.download_new_client_cert(
- credentials=credentials,
- verify=verify)
-
- except auth.SRPAuthenticationError as exc:
- return self.fail(
- self.tr("Authentication error: %s" % exc.message))
-
- except Exception as exc:
- return self.fail(exc.message)
-
- else:
- return True
-
- yield(('Validating credentials', 60), validate_credentials)
-
- self.set_done()
- yield(("end_sentinel", 100), lambda: None)
-
- def green_validation_status(self):
- val = self.validationMsg
- val.setText(self.tr('Credentials validated.'))
- val.setStyleSheet(styles.GreenLineEdit)
-
- def on_checks_validation_ready(self):
- """
- after checks
- """
- if self.is_done():
- self.disableFields()
- self.cleanup_errormsg()
- self.clean_wizard_errors(self.current_page)
- # make the user confirm the transition
- # to next page.
- self.nextText('&Next')
- self.nextFocus()
- self.green_validation_status()
- self.do_confirm_next = True
-
- # ui update
-
- def nextText(self, text):
- self.setButtonText(
- QtGui.QWizard.NextButton, text)
-
- def nextFocus(self):
- self.wizard().button(
- QtGui.QWizard.NextButton).setFocus()
-
- def disableNextButton(self):
- self.wizard().button(
- QtGui.QWizard.NextButton).setDisabled(True)
-
- def onUserNamePositionChanged(self, *args):
- if self.initial_username_sample:
- self.userNameLineEdit.setText('')
- # XXX set regular color
- self.initial_username_sample = None
-
- def onUserNameTextChanged(self, *args):
- if self.initial_username_sample:
- k = args[0][-1]
- self.initial_username_sample = None
- self.userNameLineEdit.setText(k)
-
- def disableFields(self):
- for field in (self.userNameLineEdit,
- self.userPasswordLineEdit):
- field.setDisabled(True)
-
- def populateErrors(self):
- # XXX could move this to ValidationMixin
- # used in providerselect and register too
-
- errors = self.wizard().get_validation_error(
- self.current_page)
- showerr = self.validationMsg.setText
-
- if errors:
- bad_str = getattr(self, 'bad_string', None)
- cur_str = self.userNameLineEdit.text()
-
- if bad_str is None:
- # first time we fall here.
- # save the current bad_string value
- self.bad_string = cur_str
- showerr(errors)
- else:
- # not the first time
- if cur_str == bad_str:
- showerr(errors)
- else:
- self.focused_field = False
- showerr('')
-
- def cleanup_errormsg(self):
- """
- we reset bad_string to None
- should be called before leaving the page
- """
- self.bad_string = None
-
- def paintEvent(self, event):
- """
-        we hook populateErrors on paintEvent because we
-        need to catch the case where the user re-enters the
-        page by coming back from the next one, which
-        initializePage does not cover.
- Maybe there's a better event to hook upon.
- """
- super(LogInPage, self).paintEvent(event)
- self.populateErrors()
-
- def set_prevalidation_error(self, error):
- self.prevalidation_error = error
-
- # pagewizard methods
-
- def nextId(self):
- wizard = self.wizard()
- if not wizard:
- return
- if wizard.is_provider_setup is False:
- next_ = 'providersetupvalidation'
- if wizard.is_provider_setup is True:
- # XXX bad name, ok, gonna change that
- next_ = 'signupvalidation'
- return wizard.get_page_index(next_)
-
- def initializePage(self):
- super(LogInPage, self).initializePage()
- username = self.userNameLineEdit
- username.setText('username@provider.example.org')
- username.cursorPositionChanged.connect(
- self.onUserNamePositionChanged)
- username.textChanged.connect(
- self.onUserNameTextChanged)
- self.initial_username_sample = True
- self.validationMsg.setText('')
- self.valFrame.hide()
-
- def reset_validation_status(self):
- """
- empty the validation msg
- and clean the inline validation widget.
- """
- self.validationMsg.setText('')
- self.steps.removeAllSteps()
- self.clearTable()
-
- def validatePage(self):
- """
-        if the checks are not done yet, run them.
-        if they are done, wait for the confirmation click.
- """
- self.disableNextButton()
- self.cleanup_errormsg()
- self.clean_wizard_errors(self.current_page)
-
- if self.do_confirm_next:
- full_username = self.userNameLineEdit.text()
- password = self.userPasswordLineEdit.text()
- username, domain = full_username.split('@')
- self.setField('provider_domain', domain)
- self.setField('login_userName', username)
- self.setField('login_userPassword', password)
- self.wizard().from_login = True
-
- return True
-
- if not self.is_done():
- self.reset_validation_status()
- self.do_checks()
-
- return self.is_done()
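
This page, like the other validating pages in the hunks below, produces its checks through the same _do_checks generator protocol: each item is a ((label, progress), check_callable) pair, bracketed by "head_sentinel" and "end_sentinel" markers, and a check signals failure by returning self.fail(...). The consumer lives in leap.gui.progress, which is not in this commit; the following is a rough sketch of such a consumer, assuming fail() returns False while the sentinel lambdas return None.

def run_checks(make_checks, report):
    """Drive a _do_checks-style generator in order, reporting the
    (label, progress) pair before each step and stopping at the
    first check that explicitly returns False."""
    for (label, progress), check in make_checks():
        report(label, progress)
        if check() is False:       # sentinels return None, not False
            return False
    return True


def example_checks():
    yield (("head_sentinel", 0), lambda: None)
    yield (("resolving domain name", 20), lambda: True)
    yield (("end_sentinel", 100), lambda: None)


def report(label, progress):
    print("%3d%% %s" % (progress, label))


print("all checks passed: %s" % run_checks(example_checks, report))
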
diff --git a/src/leap/gui/firstrun/mixins.py b/src/leap/gui/firstrun/mixins.py
deleted file mode 100644
index c4731893..00000000
--- a/src/leap/gui/firstrun/mixins.py
+++ /dev/null
@@ -1,18 +0,0 @@
-"""
-mixins used in First Run Wizard
-"""
-
-
-class UserFormMixIn(object):
-
- def reset_validation_status(self):
- """
- empty the validation msg
- """
- self.validationMsg.setText('')
-
- def set_validation_status(self, msg):
- """
- set generic validation status
- """
- self.validationMsg.setText(msg)
diff --git a/src/leap/gui/firstrun/providerinfo.py b/src/leap/gui/firstrun/providerinfo.py
deleted file mode 100644
index cff4caca..00000000
--- a/src/leap/gui/firstrun/providerinfo.py
+++ /dev/null
@@ -1,106 +0,0 @@
-"""
-Provider Info Page, used in First Run Wizard
-"""
-import logging
-
-from PyQt4 import QtGui
-
-from leap.gui.constants import APP_LOGO
-from leap.util.translations import translate
-
-logger = logging.getLogger(__name__)
-
-
-class ProviderInfoPage(QtGui.QWizardPage):
-
- def __init__(self, parent=None):
- super(ProviderInfoPage, self).__init__(parent)
-
- self.setTitle(self.tr("Provider Information"))
- self.setSubTitle(self.tr(
- "Services offered by this provider"))
-
- self.setPixmap(
- QtGui.QWizard.LogoPixmap,
- QtGui.QPixmap(APP_LOGO))
-
- self.create_info_panel()
-
- def create_info_panel(self):
- # Use stacked widget instead
- # of reparenting the layout.
-
- infoWidget = QtGui.QStackedWidget()
-
- info = QtGui.QWidget()
- layout = QtGui.QVBoxLayout()
-
- displayName = QtGui.QLabel("")
- description = QtGui.QLabel("")
- enrollment_policy = QtGui.QLabel("")
-
- # XXX set stylesheet...
- # prettify a little bit.
- # bigger fonts and so on...
-
- # We could use a QFrame here
-
- layout.addWidget(displayName)
- layout.addWidget(description)
- layout.addWidget(enrollment_policy)
- layout.addStretch(1)
-
- info.setLayout(layout)
- infoWidget.addWidget(info)
-
- pageLayout = QtGui.QVBoxLayout()
- pageLayout.addWidget(infoWidget)
- self.setLayout(pageLayout)
-
- # add refs to self to allow for
- # updates.
- # Watch out! Have to get rid of these references!
- # this should be better handled with signals !!
- self.displayName = displayName
- self.description = description
- self.description.setWordWrap(True)
- self.enrollment_policy = enrollment_policy
-
- def show_provider_info(self):
-
- # XXX get multilingual objects
- # directly from the config object
-
- lang = "en"
- pconfig = self.wizard().providerconfig
-
- dn = pconfig.get('name')
- display_name = dn[lang] if dn else ''
- domain_name = self.field('provider_domain')
-
- self.displayName.setText(
- "<b>%s</b> https://%s" % (display_name, domain_name))
-
- desc = pconfig.get('description')
-
- #description_text = desc[lang] if desc else ''
- description_text = translate(desc) if desc else ''
-
- self.description.setText(
- "<i>%s</i>" % description_text)
-
- # XXX should translate this...
- enroll = pconfig.get('enrollment_policy')
- if enroll:
- self.enrollment_policy.setText(
- '<b>%s</b>: <em>%s</em>' % (
- self.tr('enrollment policy'),
- enroll))
-
- def nextId(self):
- wizard = self.wizard()
- next_ = "providersetupvalidation"
- return wizard.get_page_index(next_)
-
- def initializePage(self):
- self.show_provider_info()
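
Provider metadata such as 'name' and 'description' is stored as a per-language dict (note the dn[lang] lookup above), and translate() picks the right entry. That helper lives in leap.util.translations, outside this hunk; the sketch below shows the idea, with the locale lookup and the 'en' fallback being assumptions rather than the module's actual behaviour.

import locale


def translate(msgs, default_lang="en"):
    """Pick the best entry from a multilingual dict such as
    {'en': 'VPN provider', 'es': 'Proveedor de VPN'}."""
    if not isinstance(msgs, dict):
        return msgs
    syslocale = locale.getdefaultlocale()[0] or default_lang
    lang = syslocale.split('_')[0]
    return msgs.get(lang) or msgs.get(default_lang) or ''


print(translate({'en': 'Anonymous VPN provider',
                 'es': 'Proveedor de VPN'}))
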
diff --git a/src/leap/gui/firstrun/providerselect.py b/src/leap/gui/firstrun/providerselect.py
deleted file mode 100644
index 917b16fd..00000000
--- a/src/leap/gui/firstrun/providerselect.py
+++ /dev/null
@@ -1,471 +0,0 @@
-"""
-Select Provider Page, used in First Run Wizard
-"""
-import logging
-
-import requests
-
-from PyQt4 import QtCore
-from PyQt4 import QtGui
-
-from leap.base import exceptions as baseexceptions
-#from leap.crypto import certs
-from leap.eip import exceptions as eipexceptions
-from leap.gui.progress import InlineValidationPage
-from leap.gui import styles
-from leap.gui.utils import delay
-from leap.util.web import get_https_domain_and_port
-
-from leap.gui.constants import APP_LOGO
-
-logger = logging.getLogger(__name__)
-
-
-class SelectProviderPage(InlineValidationPage):
-
- launchChecks = QtCore.pyqtSignal()
-
- def __init__(self, parent=None, providers=None):
- super(SelectProviderPage, self).__init__(parent)
- self.current_page = 'providerselection'
-
- self.setTitle(self.tr("Enter Provider"))
- self.setSubTitle(self.tr(
- "Please enter the domain of the provider you want "
- "to use for your connection")
- )
- self.setPixmap(
- QtGui.QWizard.LogoPixmap,
- QtGui.QPixmap(APP_LOGO))
-
- self.did_cert_check = False
-
- self.done = False
-
- self.setupSteps()
- self.setupUI()
-
- self.launchChecks.connect(
- self.launch_checks)
-
- self.providerNameEdit.editingFinished.connect(
- lambda: self.providerCheckButton.setFocus(True))
-
- def setupUI(self):
- """
- initializes the UI
- """
- providerNameLabel = QtGui.QLabel("h&ttps://")
- # note that we expect the bare domain name
- # we will add the scheme later
- providerNameEdit = QtGui.QLineEdit()
- providerNameEdit.cursorPositionChanged.connect(
- self.reset_validation_status)
- providerNameLabel.setBuddy(providerNameEdit)
-
- # add regex validator
-        providerDomainRe = QtCore.QRegExp(r"^[a-z0-9_\-\.]+$")
- providerNameEdit.setValidator(
- QtGui.QRegExpValidator(providerDomainRe, self))
- self.providerNameEdit = providerNameEdit
-
- # Eventually we will seed a list of
- # well known providers here.
-
- #providercombo = QtGui.QComboBox()
- #if providers:
- #for provider in providers:
- #providercombo.addItem(provider)
- #providerNameSelect = providercombo
-
- self.registerField("provider_domain*", self.providerNameEdit)
- #self.registerField('provider_name_index', providerNameSelect)
-
- validationMsg = QtGui.QLabel("")
- validationMsg.setStyleSheet(styles.ErrorLabelStyleSheet)
- self.validationMsg = validationMsg
- providerCheckButton = QtGui.QPushButton(self.tr("chec&k!"))
- self.providerCheckButton = providerCheckButton
-
- # cert info
-
- # this is used in the callback
- # for the checkbox changes.
- # tricky, since the first time came
- # from the exception message.
- # should get string from exception too!
- self.bad_cert_status = self.tr(
- "Server certificate could not be verified.")
-
- self.certInfo = QtGui.QLabel("")
- self.certInfo.setWordWrap(True)
- self.certWarning = QtGui.QLabel("")
- self.trustProviderCertCheckBox = QtGui.QCheckBox(
- self.tr("&Trust this provider certificate."))
-
- self.trustProviderCertCheckBox.stateChanged.connect(
- self.onTrustCheckChanged)
- self.providerNameEdit.textChanged.connect(
- self.onProviderChanged)
- self.providerCheckButton.clicked.connect(
- self.onCheckButtonClicked)
-
- layout = QtGui.QGridLayout()
- layout.addWidget(validationMsg, 0, 2)
- layout.addWidget(providerNameLabel, 1, 1)
- layout.addWidget(providerNameEdit, 1, 2)
- layout.addWidget(providerCheckButton, 1, 3)
-
- # add certinfo group
- # XXX not shown now. should move to validation box.
- #layout.addWidget(certinfoGroup, 4, 1, 4, 2)
- #self.certinfoGroup = certinfoGroup
- #self.certinfoGroup.hide()
-
- # add validation frame
- self.setupValidationFrame()
- layout.addWidget(self.valFrame, 4, 2, 4, 2)
- self.valFrame.hide()
-
- self.setLayout(layout)
-
- # certinfo
-
- def setupCertInfoGroup(self): # pragma: no cover
- # XXX not used now.
- certinfoGroup = QtGui.QGroupBox(
- self.tr("Certificate validation"))
- certinfoLayout = QtGui.QVBoxLayout()
- certinfoLayout.addWidget(self.certInfo)
- certinfoLayout.addWidget(self.certWarning)
- certinfoLayout.addWidget(self.trustProviderCertCheckBox)
- certinfoGroup.setLayout(certinfoLayout)
-        self.certinfoGroup = certinfoGroup
-
- # progress frame
-
- def setupValidationFrame(self):
- qframe = QtGui.QFrame
- valFrame = qframe()
- valFrame.setFrameStyle(qframe.NoFrame)
- valframeLayout = QtGui.QVBoxLayout()
- zeros = (0, 0, 0, 0)
- valframeLayout.setContentsMargins(*zeros)
-
- valframeLayout.addWidget(self.stepsTableWidget)
- valFrame.setLayout(valframeLayout)
- self.valFrame = valFrame
-
- @QtCore.pyqtSlot()
- def onDisableCheckButton(self):
- #print 'CHECK BUTTON DISABLED!!!'
- self.providerCheckButton.setDisabled(True)
-
- @QtCore.pyqtSlot()
- def launch_checks(self):
- self.do_checks()
-
- def onCheckButtonClicked(self):
- QtCore.QMetaObject.invokeMethod(
- self, "onDisableCheckButton")
-
- QtCore.QMetaObject.invokeMethod(
- self, "showStepsFrame")
-
- delay(self, "launch_checks")
-
- def _do_checks(self):
- """
- generator that yields actual checks
- that are executed in a separate thread
- """
-
- wizard = self.wizard()
- full_domain = self.providerNameEdit.text()
-
- # we check if we have a port in the domain string.
- domain, port = get_https_domain_and_port(full_domain)
- _domain = u"%s:%s" % (domain, port) if port != 443 else unicode(domain)
-
- netchecker = wizard.netchecker()
- providercertchecker = wizard.providercertchecker()
- eipconfigchecker = wizard.eipconfigchecker(domain=_domain)
-
- yield(("head_sentinel", 0), lambda: None)
-
- ########################
- # 1) try name resolution
- ########################
-
- def namecheck():
- """
- in which we check if
- we are able to name resolve
- this domain
- """
- try:
- #import ipdb;ipdb.set_trace()
- netchecker.check_name_resolution(
- domain)
-
- except baseexceptions.LeapException as exc:
- logger.error(exc.message)
- return self.fail(exc.usermessage)
-
- except Exception as exc:
- return self.fail(exc.message)
-
- else:
- return True
-
- logger.debug('checking name resolution')
- yield((self.tr("Checking if it is a valid provider"), 20), namecheck)
-
- #########################
- # 2) try https connection
- #########################
-
- def httpscheck():
- """
- in which we check
- if the provider
- is offering service over
- https
- """
- try:
- providercertchecker.is_https_working(
- "https://%s" % _domain,
- verify=True)
-
- except eipexceptions.HttpsBadCertError as exc:
- logger.debug('exception')
- return self.fail(exc.usermessage)
- # XXX skipping for now...
- ##############################################
- # We had this validation logic
- # in the provider selection page before
- ##############################################
- #if self.trustProviderCertCheckBox.isChecked():
- #pass
- #else:
- #fingerprint = certs.get_cert_fingerprint(
- #domain=domain, sep=" ")
-
- # it's ok if we've trusted this fgprt before
- #trustedcrts = wizard.trusted_certs
- #if trustedcrts and \
- # fingerprint.replace(' ', '') in trustedcrts:
- #pass
- #else:
-                # let your user face panic :P
- #self.add_cert_info(fingerprint)
- #self.did_cert_check = True
- #self.completeChanged.emit()
- #return False
-
- except baseexceptions.LeapException as exc:
- return self.fail(exc.usermessage)
-
- except Exception as exc:
- return self.fail(exc.message)
-
- else:
- return True
-
- logger.debug('checking https connection')
- yield((self.tr("Checking for a secure connection"), 40), httpscheck)
-
- ##################################
- # 3) try download provider info...
- ##################################
-
- def fetchinfo():
- try:
- # XXX we already set _domain in the initialization
- # so it should not be needed here.
- eipconfigchecker.fetch_definition(domain=_domain)
- wizard.set_providerconfig(
- eipconfigchecker.defaultprovider.config)
- except requests.exceptions.SSLError:
- return self.fail(self.tr(
- "Could not get info from provider."))
- except requests.exceptions.ConnectionError:
- return self.fail(self.tr(
- "Could not download provider info "
- "(refused conn.)."))
-
- except Exception as exc:
- return self.fail(
- self.tr(exc.message))
- else:
- return True
-
- yield((self.tr("Getting info from the provider"), 80), fetchinfo)
-
- # done!
-
- self.done = True
- yield(("end_sentinel", 100), lambda: None)
-
- def on_checks_validation_ready(self):
- """
- called after _do_checks has finished.
- """
- self.domain_checked = True
- self.completeChanged.emit()
- # let's set focus...
- if self.is_done():
- self.wizard().clean_validation_error(self.current_page)
- nextbutton = self.wizard().button(QtGui.QWizard.NextButton)
- nextbutton.setFocus()
- else:
- self.providerNameEdit.setFocus()
-
- # cert trust verification
- # (disabled for now)
-
- def is_insecure_cert_trusted(self):
- return self.trustProviderCertCheckBox.isChecked()
-
- def onTrustCheckChanged(self, state): # pragma: no cover XXX
- checked = False
- if state == 2:
- checked = True
-
- if checked:
- self.reset_validation_status()
- else:
- self.set_validation_status(self.bad_cert_status)
-
- # trigger signal to redraw next button
- self.completeChanged.emit()
-
- def add_cert_info(self, certinfo): # pragma: no cover XXX
- self.certWarning.setText(
- self.tr("Do you want to <b>trust this provider certificate?</b>"))
- # XXX Check if this needs to abstracted to remove certinfo
- self.certInfo.setText(
- self.tr('SHA-256 fingerprint: <i>%s</i><br>' % certinfo))
- self.certInfo.setWordWrap(True)
- self.certinfoGroup.show()
-
- def onProviderChanged(self, text):
- self.done = False
- provider = self.providerNameEdit.text()
- if provider:
- self.providerCheckButton.setDisabled(False)
- else:
- self.providerCheckButton.setDisabled(True)
- self.completeChanged.emit()
-
- def reset_validation_status(self):
- """
- empty the validation msg
- and clean the inline validation widget.
- """
- self.validationMsg.setText('')
- self.steps.removeAllSteps()
- self.clearTable()
- self.domain_checked = False
-
- # pagewizard methods
-
- def isComplete(self):
- provider = self.providerNameEdit.text()
-
- if not self.is_done():
- return False
-
- if not provider:
- return False
- else:
- if self.is_insecure_cert_trusted():
- return True
- if not self.did_cert_check:
- if self.is_done():
- # XXX sure?
- return True
- return False
-
- def populateErrors(self):
- # XXX could move this to ValidationMixin
- # with some defaults for the validating fields
- # (now it only allows one field, manually specified)
-
- #logger.debug('getting errors')
- errors = self.wizard().get_validation_error(
- self.current_page)
- if errors:
- bad_str = getattr(self, 'bad_string', None)
- cur_str = self.providerNameEdit.text()
- showerr = self.validationMsg.setText
- markred = lambda: self.providerNameEdit.setStyleSheet(
- styles.ErrorLineEdit)
- umarkrd = lambda: self.providerNameEdit.setStyleSheet(
- styles.RegularLineEdit)
- if bad_str is None:
- # first time we fall here.
- # save the current bad_string value
- self.bad_string = cur_str
- showerr(errors)
- markred()
- else:
- # not the first time
- # XXX hey, this is getting convoluted.
- # roll out this.
- # but be careful about all the possibilities
- # with going back and forth once you
- # enter a domain.
- if cur_str == bad_str:
- showerr(errors)
- markred()
- else:
- if not getattr(self, 'domain_checked', None):
- showerr('')
- umarkrd()
- else:
- self.bad_string = cur_str
- showerr(errors)
-
- def cleanup_errormsg(self):
- """
- we reset bad_string to None
- should be called before leaving the page
- """
- self.bad_string = None
- self.domain_checked = False
-
- def paintEvent(self, event):
- """
-        we hook populateErrors on paintEvent because we
-        need to catch the case where the user re-enters the
-        page by coming back from the next one, which
-        initializePage does not cover.
- Maybe there's a better event to hook upon.
- """
- super(SelectProviderPage, self).paintEvent(event)
- self.populateErrors()
-
- def initializePage(self):
- self.validationMsg.setText('')
- if hasattr(self, 'certinfoGroup'):
- # XXX remove ?
- self.certinfoGroup.hide()
- self.done = False
- self.providerCheckButton.setDisabled(True)
- self.valFrame.hide()
- self.steps.removeAllSteps()
- self.clearTable()
-
- def validatePage(self):
- # some cleanup before we leave the page
- self.cleanup_errormsg()
-
- # go
- return True
-
- def nextId(self):
- wizard = self.wizard()
- if not wizard:
- return
- return wizard.get_page_index('providerinfo')
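
The domain field above accepts an optional port (e.g. "testprovider.example.org:8443"), which get_https_domain_and_port splits apart, defaulting to 443. The helper itself sits in leap.util.web, outside this commit; the sketch below is inferred only from how it is used above, not from the real implementation.

def get_https_domain_and_port(full_domain):
    """Split 'example.org' or 'example.org:8443' into (domain, port),
    with 443 as the default https port."""
    if ':' in full_domain:
        domain, port = full_domain.rsplit(':', 1)
        return domain, int(port)
    return full_domain, 443


domain, port = get_https_domain_and_port("testprovider.example.org:8443")
# mirror of the normalization done in SelectProviderPage._do_checks
_domain = "%s:%s" % (domain, port) if port != 443 else domain
print(_domain)
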
diff --git a/src/leap/gui/firstrun/providersetup.py b/src/leap/gui/firstrun/providersetup.py
deleted file mode 100644
index 47060f6e..00000000
--- a/src/leap/gui/firstrun/providersetup.py
+++ /dev/null
@@ -1,157 +0,0 @@
-"""
-Provider Setup Validation Page,
-used in First Run Wizard
-"""
-import logging
-
-import requests
-
-from PyQt4 import QtGui
-
-from leap.base import exceptions as baseexceptions
-from leap.gui.progress import ValidationPage
-
-from leap.gui.constants import APP_LOGO
-
-logger = logging.getLogger(__name__)
-
-
-class ProviderSetupValidationPage(ValidationPage):
- def __init__(self, parent=None):
- super(ProviderSetupValidationPage, self).__init__(parent)
- self.current_page = "providersetupvalidation"
-
- # XXX needed anymore?
- #is_signup = self.field("is_signup")
- #self.is_signup = is_signup
-
- self.setTitle(self.tr("Provider setup"))
- self.setSubTitle(
- self.tr("Gathering configuration options for this provider"))
-
- self.setPixmap(
- QtGui.QWizard.LogoPixmap,
- QtGui.QPixmap(APP_LOGO))
-
- def _do_checks(self):
- """
- generator that yields actual checks
- that are executed in a separate thread
- """
-
- full_domain = self.field('provider_domain')
- wizard = self.wizard()
- pconfig = wizard.providerconfig
-
- #pCertChecker = wizard.providercertchecker
- #certchecker = pCertChecker(domain=full_domain)
- pCertChecker = wizard.providercertchecker(
- domain=full_domain)
-
- yield(("head_sentinel", 0), lambda: None)
-
- ########################
- # 1) fetch ca cert
- ########################
-
- def fetchcacert():
- if pconfig:
- ca_cert_uri = pconfig.get('ca_cert_uri').geturl()
- else:
- ca_cert_uri = None
-
- # XXX check scheme == "https"
- # XXX passing verify == False because
- # we have trusted right before.
- # We should check it's the same domain!!!
- # (Check with the trusted fingerprints dict
- # or something smart)
- try:
- pCertChecker.download_ca_cert(
- uri=ca_cert_uri,
- verify=False)
-
- except baseexceptions.LeapException as exc:
- logger.error(exc.message)
- # XXX this should be _ method
- return self.fail(self.tr(exc.usermessage))
-
- except Exception as exc:
- return self.fail(exc.message)
-
- else:
- return True
-
- yield((self.tr('Fetching CA certificate'), 30),
- fetchcacert)
-
- #########################
- # 2) check CA fingerprint
- #########################
-
- def checkcafingerprint():
- # XXX get the real thing!!!
- pass
- #ca_cert_fingerprint = pconfig.get('ca_cert_fingerprint', None)
-
- # XXX get fingerprint dict (types)
- #sha256_fpr = ca_cert_fingerprint.split('=')[1]
-
- #validate_fpr = pCertChecker.check_ca_cert_fingerprint(
- #fingerprint=sha256_fpr)
- #if not validate_fpr:
- # XXX update validationMsg
- # should catch exception
- #return False
-
- yield((self.tr("Checking CA fingerprint"), 60),
- checkcafingerprint)
-
- #########################
-        # 3) validate API certificate
- #########################
-
- def validatecacert():
- api_uri = pconfig.get('api_uri', None)
- try:
- pCertChecker.verify_api_https(api_uri)
- except requests.exceptions.SSLError as exc:
- return self.fail("Validation Error")
- except Exception as exc:
-                return self.fail(exc.message)
- else:
- return True
-
- yield((self.tr('Validating api certificate'), 90), validatecacert)
-
- self.set_done()
- yield(('end_sentinel', 100), lambda: None)
-
- def on_checks_validation_ready(self):
- """
- called after _do_checks has finished
- (connected to checker thread finished signal)
- """
- wizard = self.wizard()
- prevpage = "login" if wizard.from_login else "providerselection"
-
- if self.errors:
- logger.debug('going back with errors')
- name, first_error = self.pop_first_error()
- wizard.set_validation_error(
- prevpage,
- first_error)
-
- def nextId(self):
- wizard = self.wizard()
- from_login = wizard.from_login
- if from_login:
- next_ = 'connect'
- else:
- next_ = 'signup'
- return wizard.get_page_index(next_)
-
- def initializePage(self):
- super(ProviderSetupValidationPage, self).initializePage()
- self.set_undone()
- self.completeChanged.emit()
diff --git a/src/leap/gui/firstrun/register.py b/src/leap/gui/firstrun/register.py
deleted file mode 100644
index 15278330..00000000
--- a/src/leap/gui/firstrun/register.py
+++ /dev/null
@@ -1,387 +0,0 @@
-"""
-Register User Page, used in First Run Wizard
-"""
-import json
-import logging
-import socket
-
-import requests
-
-from PyQt4 import QtCore
-from PyQt4 import QtGui
-
-from leap.gui.firstrun.mixins import UserFormMixIn
-
-logger = logging.getLogger(__name__)
-
-from leap.base import auth
-from leap.gui import styles
-from leap.gui.constants import APP_LOGO, BARE_USERNAME_REGEX
-from leap.gui.progress import InlineValidationPage
-from leap.gui.styles import ErrorLabelStyleSheet
-
-
-class RegisterUserPage(InlineValidationPage, UserFormMixIn):
-
- def __init__(self, parent=None):
-
- super(RegisterUserPage, self).__init__(parent)
- self.current_page = "signup"
-
- self.setTitle(self.tr("Sign Up"))
- # subtitle is set in the initializePage
-
- self.setPixmap(
- QtGui.QWizard.LogoPixmap,
- QtGui.QPixmap(APP_LOGO))
-
- # commit page means there's no way back after this...
- # XXX should change the text on the "commit" button...
- self.setCommitPage(True)
-
- self.setupSteps()
- self.setupUI()
- self.do_confirm_next = False
- self.focused_field = False
-
- def setupUI(self):
- userNameLabel = QtGui.QLabel(self.tr("User &name:"))
- userNameLineEdit = QtGui.QLineEdit()
- userNameLineEdit.cursorPositionChanged.connect(
- self.reset_validation_status)
- userNameLabel.setBuddy(userNameLineEdit)
-
- # let's add regex validator
- usernameRe = QtCore.QRegExp(BARE_USERNAME_REGEX)
- userNameLineEdit.setValidator(
- QtGui.QRegExpValidator(usernameRe, self))
- self.userNameLineEdit = userNameLineEdit
-
- userPasswordLabel = QtGui.QLabel(self.tr("&Password:"))
- self.userPasswordLineEdit = QtGui.QLineEdit()
- self.userPasswordLineEdit.setEchoMode(
- QtGui.QLineEdit.Password)
- userPasswordLabel.setBuddy(self.userPasswordLineEdit)
-
- userPassword2Label = QtGui.QLabel(self.tr("Password (again):"))
- self.userPassword2LineEdit = QtGui.QLineEdit()
- self.userPassword2LineEdit.setEchoMode(
- QtGui.QLineEdit.Password)
- userPassword2Label.setBuddy(self.userPassword2LineEdit)
-
- rememberPasswordCheckBox = QtGui.QCheckBox(
- self.tr("&Remember username and password."))
- rememberPasswordCheckBox.setChecked(True)
-
- self.registerField('userName*', self.userNameLineEdit)
- self.registerField('userPassword*', self.userPasswordLineEdit)
- self.registerField('userPassword2*', self.userPassword2LineEdit)
-
- # XXX missing password confirmation
- # XXX validator!
-
- self.registerField('rememberPassword', rememberPasswordCheckBox)
-
- layout = QtGui.QGridLayout()
- layout.setColumnMinimumWidth(0, 20)
-
- validationMsg = QtGui.QLabel("")
- validationMsg.setStyleSheet(ErrorLabelStyleSheet)
-
- self.validationMsg = validationMsg
-
- layout.addWidget(validationMsg, 0, 3)
- layout.addWidget(userNameLabel, 1, 0)
- layout.addWidget(self.userNameLineEdit, 1, 3)
- layout.addWidget(userPasswordLabel, 2, 0)
- layout.addWidget(userPassword2Label, 3, 0)
- layout.addWidget(self.userPasswordLineEdit, 2, 3)
- layout.addWidget(self.userPassword2LineEdit, 3, 3)
- layout.addWidget(rememberPasswordCheckBox, 4, 3, 4, 4)
-
- # add validation frame
- self.setupValidationFrame()
- layout.addWidget(self.valFrame, 5, 2, 5, 2)
- self.valFrame.hide()
-
- self.setLayout(layout)
- self.commitText("Sign up!")
-
- # commit button
-
- def commitText(self, text):
- # change "commit" button text
- self.setButtonText(
- QtGui.QWizard.CommitButton, text)
-
- @property
- def commitButton(self):
- return self.wizard().button(QtGui.QWizard.CommitButton)
-
- def commitFocus(self):
- self.commitButton.setFocus()
-
- def disableCommitButton(self):
- self.commitButton.setDisabled(True)
-
- def disableFields(self):
- for field in (self.userNameLineEdit,
- self.userPasswordLineEdit,
- self.userPassword2LineEdit):
- field.setDisabled(True)
-
- # error painting
- def paintEvent(self, event):
- """
-        we hook populateErrors on paintEvent because we
-        need to catch the case where the user re-enters the
-        page by coming back from the next one, which
-        initializePage does not cover.
- Maybe there's a better event to hook upon.
- """
- super(RegisterUserPage, self).paintEvent(event)
- self.populateErrors()
-
- def markRedAndGetFocus(self, field):
- field.setStyleSheet(styles.ErrorLineEdit)
- if not self.focused_field:
- self.focused_field = True
- field.setFocus(QtCore.Qt.OtherFocusReason)
-
- def markRegular(self, field):
- field.setStyleSheet(styles.RegularLineEdit)
-
- def populateErrors(self):
- def showerr(text):
- self.validationMsg.setText(text)
- err_lower = text.lower()
- if "username" in err_lower:
- self.markRedAndGetFocus(
- self.userNameLineEdit)
- if "password" in err_lower:
- self.markRedAndGetFocus(
- self.userPasswordLineEdit)
-
- def unmarkred():
- for field in (self.userNameLineEdit,
- self.userPasswordLineEdit,
- self.userPassword2LineEdit):
- self.markRegular(field)
-
- errors = self.wizard().get_validation_error(
- self.current_page)
- if errors:
- bad_str = getattr(self, 'bad_string', None)
- cur_str = self.userNameLineEdit.text()
- #prev_er = getattr(self, 'prevalidation_error', None)
-
- if bad_str is None:
- # first time we fall here.
- # save the current bad_string value
- self.bad_string = cur_str
- showerr(errors)
- else:
- #if prev_er:
- #showerr(prev_er)
- #return
- # not the first time
- if cur_str == bad_str:
- showerr(errors)
- else:
- self.focused_field = False
- showerr('')
- unmarkred()
- else:
- # no errors
- self.focused_field = False
- unmarkred()
-
- def cleanup_errormsg(self):
- """
- we reset bad_string to None
- should be called before leaving the page
- """
- self.bad_string = None
-
- def green_validation_status(self):
- val = self.validationMsg
- val.setText(self.tr('Registration succeeded!'))
- val.setStyleSheet(styles.GreenLineEdit)
-
- def reset_validation_status(self):
- """
- empty the validation msg
- and clean the inline validation widget.
- """
- self.validationMsg.setText('')
- self.steps.removeAllSteps()
- self.clearTable()
-
- # actual checks
-
- def _do_checks(self):
- """
- generator that yields actual checks
- that are executed in a separate thread
- """
- wizard = self.wizard()
-
- provider = self.field('provider_domain')
- username = self.userNameLineEdit.text()
- password = self.userPasswordLineEdit.text()
- password2 = self.userPassword2LineEdit.text()
-
- pconfig = wizard.eipconfigchecker(domain=provider)
- pconfig.defaultprovider.load()
- pconfig.set_api_domain()
-
- def checkpass():
- # we better have here
- # some call to a password checker...
-            # to assess strength and avoid silly stuff.
-
- if password != password2:
-                return self.fail(self.tr('Passwords do not match.'))
-
- if len(password) < 6:
- #self.set_prevalidation_error('Password too short.')
- return self.fail(self.tr('Password too short.'))
-
- if password == "123456":
- # joking, but not too much.
- #self.set_prevalidation_error('Password too obvious.')
- return self.fail(self.tr('Password too obvious.'))
-
- # go
- return True
-
- yield(("head_sentinel", 0), checkpass)
-
- # XXX should emit signal for .show the frame!
- # XXX HERE!
-
- ##################################################
- # 1) register user
- ##################################################
-
- # show the frame before going on...
- QtCore.QMetaObject.invokeMethod(
- self, "showStepsFrame")
-
- def register():
-
- signup = auth.LeapSRPRegister(
- schema="https",
- provider=pconfig.apidomain,
- verify=pconfig.cacert)
- try:
- ok, req = signup.register_user(
- username, password)
-
- except socket.timeout:
- return self.fail(
- self.tr("Error connecting to provider (timeout)"))
-
- except requests.exceptions.ConnectionError as exc:
- logger.error(exc.message)
- return self.fail(
-                    self.tr('Error connecting to provider (connerr).'))
- except Exception as exc:
- return self.fail(exc.message)
-
- # XXX check for != OK instead???
-
- if req.status_code in (404, 500):
- return self.fail(
- self.tr(
- "Error during registration (%s)") % req.status_code)
-
- try:
- validation_msgs = json.loads(req.content)
- errors = validation_msgs.get('errors', None)
- logger.debug('validation errors: %s' % validation_msgs)
- except ValueError:
- # probably bad json returned
- return self.fail(
- self.tr(
- "Could not register (bad response)"))
-
- if errors and errors.get('login', None):
-                # XXX this sometimes catches the blank username
- # but we're not allowing that (soon)
- return self.fail(
- self.tr('Username not available.'))
-
- return True
-
- logger.debug('registering user')
- yield(("Registering username", 40), register)
-
- self.set_done()
- yield(("end_sentinel", 100), lambda: None)
-
- def on_checks_validation_ready(self):
- """
- after checks
- """
- if self.is_done():
- self.disableFields()
- self.cleanup_errormsg()
- self.clean_wizard_errors(self.current_page)
- # make the user confirm the transition
- # to next page.
- self.commitText('Connect!')
- self.commitFocus()
- self.green_validation_status()
- self.do_confirm_next = True
-
- # pagewizard methods
-
- def validatePage(self):
- """
-        if the registration is not done yet, run the checks.
-        if it is done, wait for the confirmation click.
- """
- self.disableCommitButton()
- self.cleanup_errormsg()
- self.clean_wizard_errors(self.current_page)
-
- # After a successful validation
- # (ie, success register with server)
- # we change the commit button text
- # and set this flag to True.
- if self.do_confirm_next:
- return True
-
- if not self.is_done():
- # calls checks, which after successful
- # execution will call on_checks_validation_ready
- self.reset_validation_status()
- self.do_checks()
-
- return self.is_done()
-
- def initializePage(self):
- """
- inits wizard page
- """
- provider = unicode(self.field('provider_domain'))
- if provider:
- # here we should have provider
- # but in tests we might not.
-
- # XXX this error causes a segfault on free()
- # that we might want to get fixed ...
- #self.setSubTitle(
- #self.tr("Register a new user with provider %s.") %
- #provider)
- self.setSubTitle(
- self.tr("Register a new user with provider <em>%s</em>" %
- provider))
- self.validationMsg.setText('')
- self.userPassword2LineEdit.setText('')
- self.valFrame.hide()
-
- def nextId(self):
- wizard = self.wizard()
- return wizard.get_page_index('connect')
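
LeapSRPRegister (leap.base.auth) is used above but not defined in this commit. The wire format it has to produce can be read off the fake provider further down: users.json expects the login plus a hex-encoded SRP salt and verifier. Below is a hedged sketch of a minimal registration call against that fake endpoint, using pysrp and requests directly rather than the real LeapSRPRegister class.

import binascii
import json

import requests
import srp


def register_user(api_uri, username, password):
    """POST a freshly generated SRP salt/verifier pair the way the
    fake users.json endpoint (see fake_provider.py below) expects."""
    salt, vkey = srp.create_salted_verification_key(
        username, password,
        hash_alg=srp.SHA256, ng_type=srp.NG_1024)
    req = requests.post(
        "%s/1/users.json" % api_uri,
        data={
            'user[login]': username,
            'user[password_salt]': binascii.hexlify(salt),
            'user[password_verifier]': binascii.hexlify(vkey),
        })
    errors = json.loads(req.content).get('errors')
    return errors is None, req


if __name__ == "__main__":
    ok, req = register_user("http://localhost:8000", "me", "s3cr3t")
    print("registered: %s -- %s" % (ok, req.content))
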
diff --git a/src/leap/gui/firstrun/tests/integration/fake_provider.py b/src/leap/gui/firstrun/tests/integration/fake_provider.py
deleted file mode 100755
index 668db5d1..00000000
--- a/src/leap/gui/firstrun/tests/integration/fake_provider.py
+++ /dev/null
@@ -1,302 +0,0 @@
-#!/usr/bin/env python
-"""A server faking some of the provider resources and apis,
-used for testing Leap Client requests
-
-It requires that you create a subfolder named 'certs'
-and place the following files in it:
-
-[ ] certs/leaptestscert.pem
-[ ] certs/leaptestskey.pem
-[ ] certs/cacert.pem
-[ ] certs/openvpn.pem
-
-[ ] provider.json
-[ ] eip-service.json
-"""
-# XXX NOTE: intended for manual debug.
-# I intend to include this as a regular test after 0.2.0 release
-# (so we can add twisted as a dep there)
-import binascii
-import json
-import os
-import sys
-
-# python SRP LIB (! important MUST be >=1.0.1 !)
-import srp
-
-# GnuTLS Example -- is not working as expected
-#from gnutls import crypto
-#from gnutls.constants import COMP_LZO, COMP_DEFLATE, COMP_NULL
-#from gnutls.interfaces.twisted import X509Credentials
-
-# Going with OpenSSL as a workaround instead
-# But we DO NOT want to introduce this dependency.
-from OpenSSL import SSL
-
-from zope.interface import Interface, Attribute, implements
-
-from twisted.web.server import Site
-from twisted.web.static import File
-from twisted.web.resource import Resource
-from twisted.internet import reactor
-
-from leap.testing.https_server import where
-
-# See
-# http://twistedmatrix.com/documents/current/web/howto/web-in-60/index.html
-# for more examples
-
-"""
-Testing the FAKE_API:
-#####################
-
- 1) register a user
- >> curl -d "user[login]=me" -d "user[password_salt]=foo" \
- -d "user[password_verifier]=beef" http://localhost:8000/1/users.json
- << {"errors": null}
-
- 2) check that if you try to register again, it will fail:
- >> curl -d "user[login]=me" -d "user[password_salt]=foo" \
- -d "user[password_verifier]=beef" http://localhost:8000/1/users.json
- << {"errors": {"login": "already taken!"}}
-
-"""
-
-# Globals to mock user/sessiondb
-
-USERDB = {}
-SESSIONDB = {}
-
-
-safe_unhexlify = lambda x: binascii.unhexlify(x) \
- if (len(x) % 2 == 0) else binascii.unhexlify('0' + x)
-
-
-class IUser(Interface):
- login = Attribute("User login.")
- salt = Attribute("Password salt.")
- verifier = Attribute("Password verifier.")
- session = Attribute("Session.")
- svr = Attribute("Server verifier.")
-
-
-class User(object):
- implements(IUser)
-
- def __init__(self, login, salt, verifier):
- self.login = login
- self.salt = salt
- self.verifier = verifier
- self.session = None
-
- def set_server_verifier(self, svr):
- self.svr = svr
-
- def set_session(self, session):
- SESSIONDB[session] = self
- self.session = session
-
-
-class FakeUsers(Resource):
- def __init__(self, name):
- self.name = name
-
- def render_POST(self, request):
- args = request.args
-
- login = args['user[login]'][0]
- salt = args['user[password_salt]'][0]
- verifier = args['user[password_verifier]'][0]
-
- if login in USERDB:
- return "%s\n" % json.dumps(
- {'errors': {'login': 'already taken!'}})
-
- print login, verifier, salt
- user = User(login, salt, verifier)
- USERDB[login] = user
- return json.dumps({'errors': None})
-
-
-def get_user(request):
- login = request.args.get('login')
- if login:
- user = USERDB.get(login[0], None)
- if user:
- return user
-
- session = request.getSession()
- user = SESSIONDB.get(session, None)
- return user
-
-
-class FakeSession(Resource):
- def __init__(self, name):
- self.name = name
-
- def render_GET(self, request):
- return "%s\n" % json.dumps({'errors': None})
-
- def render_POST(self, request):
-
- user = get_user(request)
-
- if not user:
- # XXX get real error from demo provider
- return json.dumps({'errors': 'no such user'})
-
- A = request.args['A'][0]
-
- _A = safe_unhexlify(A)
- _salt = safe_unhexlify(user.salt)
- _verifier = safe_unhexlify(user.verifier)
-
- svr = srp.Verifier(
- user.login,
- _salt,
- _verifier,
- _A,
- hash_alg=srp.SHA256,
- ng_type=srp.NG_1024)
-
- s, B = svr.get_challenge()
-
- _B = binascii.hexlify(B)
-
- print 'login = %s' % user.login
- print 'salt = %s' % user.salt
- print 'len(_salt) = %s' % len(_salt)
- print 'vkey = %s' % user.verifier
- print 'len(vkey) = %s' % len(_verifier)
- print 's = %s' % binascii.hexlify(s)
- print 'B = %s' % _B
- print 'len(B) = %s' % len(_B)
-
- session = request.getSession()
- user.set_session(session)
- user.set_server_verifier(svr)
-
- # yep, this is tricky.
- # some things are *already* unhexlified.
- data = {
- 'salt': user.salt,
- 'B': _B,
- 'errors': None}
-
- return json.dumps(data)
-
- def render_PUT(self, request):
-
- # XXX check session???
- user = get_user(request)
-
- if not user:
- print 'NO USER'
- return json.dumps({'errors': 'no such user'})
-
- data = request.content.read()
- auth = data.split("client_auth=")
- M = auth[1] if len(auth) > 1 else None
-        # if no M proof, return
- if not M:
- return json.dumps({'errors': 'no M proof passed by client'})
-
- svr = user.svr
- HAMK = svr.verify_session(binascii.unhexlify(M))
- if HAMK is None:
- print 'verification failed!!!'
- raise Exception("Authentication failed!")
- #import ipdb;ipdb.set_trace()
-
- assert svr.authenticated()
- print "***"
- print 'server authenticated user SRP!'
- print "***"
-
- return json.dumps(
- {'M2': binascii.hexlify(HAMK), 'errors': None})
-
-
-class API_Sessions(Resource):
- def getChild(self, name, request):
- return FakeSession(name)
-
-
-def get_certs_path():
- script_path = os.path.realpath(os.path.dirname(sys.argv[0]))
- certs_path = os.path.join(script_path, 'certs')
- return certs_path
-
-
-def get_TLS_credentials():
- # XXX this is giving errors
-    # XXX Review! We want to use gnutls!
-
- cert = crypto.X509Certificate(
- open(where('leaptestscert.pem')).read())
- key = crypto.X509PrivateKey(
- open(where('leaptestskey.pem')).read())
- ca = crypto.X509Certificate(
- open(where('cacert.pem')).read())
- #crl = crypto.X509CRL(open(certs_path + '/crl.pem').read())
- #cred = crypto.X509Credentials(cert, key, [ca], [crl])
- cred = X509Credentials(cert, key, [ca])
- cred.verify_peer = True
- cred.session_params.compressions = (COMP_LZO, COMP_DEFLATE, COMP_NULL)
- return cred
-
-
-class OpenSSLServerContextFactory:
- # XXX workaround for broken TLS interface
- # from gnuTLS.
-
- def getContext(self):
- """Create an SSL context.
- This is a sample implementation that loads a certificate from a file
- called 'server.pem'."""
-
- ctx = SSL.Context(SSL.SSLv23_METHOD)
- #certs_path = get_certs_path()
- #ctx.use_certificate_file(certs_path + '/leaptestscert.pem')
- #ctx.use_privatekey_file(certs_path + '/leaptestskey.pem')
- ctx.use_certificate_file(where('leaptestscert.pem'))
- ctx.use_privatekey_file(where('leaptestskey.pem'))
- return ctx
-
-
-def serve_fake_provider():
- root = Resource()
- root.putChild("provider.json", File("./provider.json"))
- config = Resource()
- config.putChild(
- "eip-service.json",
- File("./eip-service.json"))
- apiv1 = Resource()
- apiv1.putChild("config", config)
- apiv1.putChild("sessions.json", API_Sessions())
- apiv1.putChild("users.json", FakeUsers(None))
- apiv1.putChild("cert", File(get_certs_path() + '/openvpn.pem'))
- root.putChild("1", apiv1)
-
-    # unused (and broken) while the gnutls listenTLS call below is disabled
-    #cred = get_TLS_credentials()
-
- factory = Site(root)
-
- # regular http (for debugging with curl)
- reactor.listenTCP(8000, factory)
-
- # TLS with gnutls --- seems broken :(
- #reactor.listenTLS(8003, factory, cred)
-
- # OpenSSL
- reactor.listenSSL(8443, factory, OpenSSLServerContextFactory())
-
- reactor.run()
-
-
-if __name__ == "__main__":
-
- from twisted.python import log
- log.startLogging(sys.stdout)
-
- serve_fake_provider()
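
The sessions.json resource above implements the server half of the SRP login handshake: POST A, receive salt and B, PUT the M proof, receive M2 (HAMK). For manual testing, the matching client half with pysrp and requests looks roughly like the sketch below. It targets the fake endpoints above; the real client code lives in leap.base.auth and may differ.

import binascii

import requests
import srp


def srp_login(api_uri, username, password):
    """Client side of the SRP handshake served by FakeSession above."""
    session = requests.Session()   # keeps the twisted session cookie
    usr = srp.User(username, password,
                   hash_alg=srp.SHA256, ng_type=srp.NG_1024)
    uname, A = usr.start_authentication()

    # step 1: send A, get back the (hex-encoded) salt and B
    reply = session.post(
        "%s/1/sessions.json/%s" % (api_uri, uname),
        data={'login': uname, 'A': binascii.hexlify(A)}).json()
    M = usr.process_challenge(
        binascii.unhexlify(reply['salt']),
        binascii.unhexlify(reply['B']))

    # step 2: send the M proof, verify the returned M2 (HAMK)
    reply = session.put(
        "%s/1/sessions.json/%s" % (api_uri, uname),
        data={'client_auth': binascii.hexlify(M)}).json()
    usr.verify_session(binascii.unhexlify(reply['M2']))
    return usr.authenticated()


if __name__ == "__main__":
    print(srp_login("http://localhost:8000", "me", "s3cr3t"))
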
diff --git a/src/leap/gui/firstrun/wizard.py b/src/leap/gui/firstrun/wizard.py
deleted file mode 100755
index f198dca0..00000000
--- a/src/leap/gui/firstrun/wizard.py
+++ /dev/null
@@ -1,309 +0,0 @@
-#!/usr/bin/env python
-import logging
-
-import sip
-try:
- sip.setapi('QString', 2)
- sip.setapi('QVariant', 2)
-except ValueError:
- pass
-
-from PyQt4 import QtCore
-from PyQt4 import QtGui
-
-from leap.base import checks as basechecks
-from leap.crypto import leapkeyring
-from leap.eip import checks as eipchecks
-
-from leap.gui import firstrun
-
-from leap.gui import mainwindow_rc
-
-try:
- from collections import OrderedDict
-except ImportError:
- # We must be in 2.6
- from leap.util.dicts import OrderedDict
-
-logger = logging.getLogger(__name__)
-
-"""
-~~~~~~~~~~~~~~~~~~~~~~~~~~
-Work in progress!
-~~~~~~~~~~~~~~~~~~~~~~~~~~
-This wizard still needs to be refactored out.
-
-TODO-ish:
-
-[X] Break file in wizard / pages files (and its own folder).
-[ ] Separate presentation from logic.
-[ ] Have a "manager" class for connections, that can be
- dep-injected for testing.
-[ ] Document signals used / expected.
-[ ] Separate style from widgets.
-[ ] Fix TOFU Widget for provider cert.
-[X] Refactor widgets out.
-[ ] Follow more MVC style.
-[ ] Maybe separate "first run wizard" into different wizards
- that share some of the pages?
-"""
-
-
-def get_pages_dict():
- return OrderedDict((
- ('intro', firstrun.intro.IntroPage),
- ('providerselection',
- firstrun.providerselect.SelectProviderPage),
- ('login', firstrun.login.LogInPage),
- ('providerinfo', firstrun.providerinfo.ProviderInfoPage),
- ('providersetupvalidation',
- firstrun.providersetup.ProviderSetupValidationPage),
- ('signup', firstrun.register.RegisterUserPage),
- ('connect',
- firstrun.connect.ConnectionPage),
- ('lastpage', firstrun.last.LastPage)
- ))
-
-
-class FirstRunWizard(QtGui.QWizard):
-
- def __init__(
- self,
- conductor_instance,
- parent=None,
- pages_dict=None,
- username=None,
- providers=None,
- success_cb=None, is_provider_setup=False,
- trusted_certs=None,
- netchecker=basechecks.LeapNetworkChecker,
- providercertchecker=eipchecks.ProviderCertChecker,
- eipconfigchecker=eipchecks.EIPConfigChecker,
- start_eipconnection_signal=None,
- eip_statuschange_signal=None,
- debug_server=None,
- quitcallback=None):
- super(FirstRunWizard, self).__init__(
- parent,
- QtCore.Qt.WindowStaysOnTopHint)
-
- # we keep a reference to the conductor
- # to be able to launch eip checks and connection
- # in the connection page, before the wizard has ended.
- self.conductor = conductor_instance
-
- self.username = username
- self.providers = providers
-
- # success callback
- self.success_cb = success_cb
-
- # is provider setup?
- self.is_provider_setup = is_provider_setup
-
- # a dict with trusted fingerprints
- # in the form {'nospacesfingerprint': ['host1', 'host2']}
- self.trusted_certs = trusted_certs
-
- # Checkers
- self.netchecker = netchecker
- self.providercertchecker = providercertchecker
- self.eipconfigchecker = eipconfigchecker
-
- # debug server
- self.debug_server = debug_server
-
- # Signals
- # will be emitted in connecting page
- self.start_eipconnection_signal = start_eipconnection_signal
- self.eip_statuschange_signal = eip_statuschange_signal
-
- if quitcallback is not None:
- self.button(
- QtGui.QWizard.CancelButton).clicked.connect(
- quitcallback)
-
- self.providerconfig = None
- # previously registered
- # if True, jumps to LogIn page.
- # by setting 1st page??
- #self.is_previously_registered = is_previously_registered
- # XXX ??? ^v
- self.is_previously_registered = bool(self.username)
- self.from_login = False
-
- pages_dict = pages_dict or get_pages_dict()
- self.add_pages_from_dict(pages_dict)
-
- self.validation_errors = {}
- self.openvpn_status = []
-
- self.setPixmap(
- QtGui.QWizard.BannerPixmap,
- QtGui.QPixmap(':/images/banner.png'))
- self.setPixmap(
- QtGui.QWizard.BackgroundPixmap,
- QtGui.QPixmap(':/images/background.png'))
-
- # set options
- self.setOption(QtGui.QWizard.IndependentPages, on=False)
- self.setOption(QtGui.QWizard.NoBackButtonOnStartPage, on=True)
-
- self.setWindowTitle("First Run Wizard")
-
- # TODO: set style for MAC / windows ...
- #self.setWizardStyle()
-
- #
- # setup pages in wizard
- #
-
- def add_pages_from_dict(self, pages_dict):
- """
-        @param pages_dict: the dictionary with pages, where
-        values are the QWizardPage classes to instantiate.
- @type pages_dict: dict
- """
- for name, page in pages_dict.items():
- # XXX check for is_previously registered
- # and skip adding the signup branch if so
- self.addPage(page())
- self.pages_dict = pages_dict
-
- def get_page_index(self, page_name):
- """
- returns the index of the given page
- @param page_name: the name of the desired page
- @type page_name: str
-        @return: index of page in wizard
- @rtype: int
- """
- return self.pages_dict.keys().index(page_name)
-
- #
- # validation errors
- #
-
- def set_validation_error(self, pagename, error):
- self.validation_errors[pagename] = error
-
- def clean_validation_error(self, pagename):
- vald = self.validation_errors
- if pagename in vald:
- del vald[pagename]
-
- def get_validation_error(self, pagename):
- return self.validation_errors.get(pagename, None)
-
- def accept(self):
- """
- final step in the wizard.
- gather the info, update settings
- and call the success callback if any has been passed.
- """
- super(FirstRunWizard, self).accept()
-
- # username and password are in different fields
- # if they were stored in log_in or sign_up pages.
- from_login = self.from_login
- unamek_base = 'userName'
- passwk_base = 'userPassword'
- unamek = 'login_%s' % unamek_base if from_login else unamek_base
- passwk = 'login_%s' % passwk_base if from_login else passwk_base
-
- username = self.field(unamek)
- password = self.field(passwk)
- provider = self.field('provider_domain')
- remember_pass = self.field('rememberPassword')
-
- logger.debug('chosen provider: %s', provider)
- logger.debug('username: %s', username)
- logger.debug('remember password: %s', remember_pass)
-
- # we are assuming here that we only remember one username
- # in the form username@provider.domain
- # We probably could extend this to support some form of
- # profiles.
-
- settings = QtCore.QSettings()
-
- settings.setValue("FirstRunWizardDone", True)
- settings.setValue("provider_domain", provider)
- full_username = "%s@%s" % (username, provider)
-
- settings.setValue("remember_user_and_pass", remember_pass)
-
- if remember_pass:
- settings.setValue("username", full_username)
- seed = self.get_random_str(10)
- settings.setValue("%s_seed" % provider, seed)
-
- # XXX #744: comment out for 0.2.0 release
- # if we need to have a version of python-keyring < 0.9
- leapkeyring.leap_set_password(
- full_username, password, seed=seed)
-
- logger.debug('First Run Wizard Done.')
- cb = self.success_cb
- if cb and callable(cb):
- self.success_cb()
-
- # misc helpers
-
- def get_random_str(self, n):
- """
- returns a random string
- :param n: the length of the desired string
-        :rtype: str
- """
- from string import (ascii_uppercase, ascii_lowercase, digits)
- from random import choice
- return ''.join(choice(
- ascii_uppercase +
- ascii_lowercase +
- digits) for x in range(n))
-
- def set_providerconfig(self, providerconfig):
- """
- sets a providerconfig attribute
- used when we fetch and parse a json configuration
- """
- self.providerconfig = providerconfig
-
- def get_provider_by_index(self): # pragma: no cover
- """
- returns the value of a provider given its index.
- this was used in the select provider page,
- in the case where we were preseeding providers in a combobox
- """
- # Leaving it here for the moment when we go back at the
- # option of preseeding with known provider values.
- provider = self.field('provider_index')
- return self.providers[provider]
-
-
-if __name__ == '__main__':
- # standalone test
- # it can be (somehow) run against
- # gui/tests/integration/fake_user_signup.py
-
- import sys
- import logging
- logging.basicConfig()
- logger = logging.getLogger()
- logger.setLevel(logging.DEBUG)
-
- app = QtGui.QApplication(sys.argv)
- server = sys.argv[1] if len(sys.argv) > 1 else None
-
- trusted_certs = {
- "3DF83F316BFA0186"
- "0A11A5C9C7FC24B9"
- "18C62B941192CC1A"
- "49AE62218B2A4B7C": ['springbok']}
-
- wizard = FirstRunWizard(
- None, trusted_certs=trusted_certs,
- debug_server=server)
- wizard.show()
- sys.exit(app.exec_())
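
For reference, the removed accept() method above reduces to a small persistence pattern: non-secret wizard results (provider domain, username, a per-provider seed) are written to QSettings, while the password itself is handed to the system keyring. The sketch below illustrates that pattern only; the QSettings organization/application names and the use of the stock keyring package in place of LEAP's leapkeyring wrapper are assumptions for illustration, not the removed implementation.

# Hedged sketch of the persistence pattern used by the removed accept():
# plain settings go to QSettings, the secret goes to the OS keyring.
# The stock `keyring` package stands in for leapkeyring (assumption),
# and the QSettings organization/application names are illustrative.
import random
import string

import keyring
from PyQt4 import QtCore


def get_random_str(n=10):
    """Return an n-character alphanumeric seed."""
    chars = string.ascii_uppercase + string.ascii_lowercase + string.digits
    return ''.join(random.choice(chars) for _ in range(n))


def persist_first_run(username, password, provider, remember_pass=True):
    settings = QtCore.QSettings("leap", "leap-client")  # names assumed
    settings.setValue("FirstRunWizardDone", True)
    settings.setValue("provider_domain", provider)
    settings.setValue("remember_user_and_pass", remember_pass)

    if remember_pass:
        full_username = "%s@%s" % (username, provider)
        settings.setValue("username", full_username)
        settings.setValue("%s_seed" % provider, get_random_str())
        # the secret never touches QSettings; it goes to the keyring
        keyring.set_password(provider, full_username, password)
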
diff --git a/src/leap/gui/locale_rc.py b/src/leap/gui/locale_rc.py
deleted file mode 100644
index 8c383709..00000000
--- a/src/leap/gui/locale_rc.py
+++ /dev/null
@@ -1,813 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Resource object code
-#
-# Created: Fri Jan 25 18:19:04 2013
-# by: The Resource Compiler for PyQt (Qt v4.8.2)
-#
-# WARNING! All changes made in this file will be lost!
-
-from PyQt4 import QtCore
-
-qt_resource_data = "\
-\x00\x00\x17\x94\
-\x3c\
-\xb8\x64\x18\xca\xef\x9c\x95\xcd\x21\x1c\xbf\x60\xa1\xbd\xdd\x42\
-\x00\x00\x01\x30\x00\x8f\x9b\xbe\x00\x00\x14\x69\x01\x23\x92\xe5\
-\x00\x00\x10\x2f\x01\x87\x64\x8e\x00\x00\x08\xbe\x01\xa8\xbe\x7e\
-\x00\x00\x0d\xf4\x02\x2c\xac\xe9\x00\x00\x0b\x9c\x02\x3a\xce\xbf\
-\x00\x00\x15\xe2\x02\x6e\x0f\xe5\x00\x00\x09\x2d\x02\x87\x60\x9e\
-\x00\x00\x06\xc6\x02\xaa\x52\x6e\x00\x00\x07\xc9\x02\xf2\xe0\x59\
-\x00\x00\x0a\x6c\x03\xec\x70\x0e\x00\x00\x10\x9c\x04\xd4\x45\xee\
-\x00\x00\x0d\x3c\x05\xb7\x8f\x59\x00\x00\x0c\x35\x06\x3e\x6a\x9e\
-\x00\x00\x06\x01\x06\x40\xa8\x7e\x00\x00\x0b\x02\x06\xee\xff\x6e\
-\x00\x00\x13\x50\x08\x13\xe8\xae\x00\x00\x0c\xc2\x08\x7a\x64\xee\
-\x00\x00\x11\x8b\x08\xe6\x98\x33\x00\x00\x05\x93\x08\xe6\x98\x33\
-\x00\x00\x0f\xb0\x09\x5c\x35\xe1\x00\x00\x0e\x96\x09\x74\x75\x4e\
-\x00\x00\x0d\x9c\x09\x98\x34\x0e\x00\x00\x12\x55\x09\xd8\x1f\x95\
-\x00\x00\x15\x19\x09\xfc\x2c\x8e\x00\x00\x05\x19\x09\xfe\x05\x90\
-\x00\x00\x0f\x06\x0a\x74\xb8\x1e\x00\x00\x00\xe6\x0a\xfd\x99\xfe\
-\x00\x00\x00\x6d\x0b\xd2\x4b\x3f\x00\x00\x07\x7d\x0c\x44\x41\xbe\
-\x00\x00\x00\x00\x0c\xc0\x94\x05\x00\x00\x09\xf2\x0d\x0d\x9d\xc5\
-\x00\x00\x06\x5f\x0d\x15\x34\x70\x00\x00\x09\x98\x0e\x36\x15\x54\
-\x00\x00\x08\x47\x0e\x7e\xf5\xee\x00\x00\x0f\x42\x0e\x91\x50\x3e\
-\x00\x00\x15\x76\x0e\xc0\xbb\x72\x00\x00\x12\xfb\x0f\x27\x0d\x6e\
-\x00\x00\x11\x22\x69\x00\x00\x16\x43\x03\x00\x00\x00\x3e\x00\x41\
-\x00\x73\x00\x73\x00\x69\x00\x73\x00\x74\x00\x65\x00\x6e\x00\x74\
-\x00\x20\x00\x66\x00\xfc\x00\x72\x00\x20\x00\x65\x00\x72\x00\x73\
-\x00\x74\x00\x6d\x00\x61\x00\x6c\x00\x69\x00\x67\x00\x65\x00\x6e\
-\x00\x20\x00\x53\x00\x74\x00\x61\x00\x72\x00\x74\x08\x00\x00\x00\
-\x00\x06\x00\x00\x00\x11\x46\x69\x72\x73\x74\x20\x72\x75\x6e\x20\
-\x77\x69\x7a\x61\x72\x64\x2e\x07\x00\x00\x00\x09\x49\x6e\x74\x72\
-\x6f\x50\x61\x67\x65\x01\x03\x00\x00\x00\x40\x00\x4d\x00\x69\x00\
-\x74\x00\x20\x00\x62\x00\x65\x00\x73\x00\x74\x00\x65\x00\x68\x00\
-\x65\x00\x6e\x00\x64\x00\x65\x00\x6e\x00\x20\x00\x44\x00\x61\x00\
-\x74\x00\x65\x00\x6e\x00\x20\x00\x65\x00\x69\x00\x6e\x00\x6c\x00\
-\x6f\x00\x67\x00\x67\x00\x65\x00\x6e\x00\x2e\x08\x00\x00\x00\x00\
-\x06\x00\x00\x00\x1b\x4c\x6f\x67\x20\x49\x6e\x20\x77\x69\x74\x68\
-\x20\x6d\x79\x20\x63\x72\x65\x64\x65\x6e\x74\x69\x61\x6c\x73\x2e\
-\x07\x00\x00\x00\x09\x49\x6e\x74\x72\x6f\x50\x61\x67\x65\x01\x03\
-\x00\x00\x02\xb8\x00\x57\x00\x69\x00\x72\x00\x20\x00\x77\x00\x65\
-\x00\x72\x00\x64\x00\x65\x00\x6e\x00\x20\x00\x64\x00\x69\x00\x63\
-\x00\x68\x00\x20\x00\x6e\x00\x75\x00\x6e\x00\x20\x00\x64\x00\x75\
-\x00\x72\x00\x63\x00\x68\x00\x20\x00\x65\x00\x69\x00\x6e\x00\x69\
-\x00\x67\x00\x65\x00\x20\x00\x4b\x00\x6f\x00\x6e\x00\x66\x00\x69\
-\x00\x67\x00\x75\x00\x72\x00\x61\x00\x74\x00\x69\x00\x6f\x00\x6e\
-\x00\x65\x00\x6e\x00\x20\x00\x66\x00\xfc\x00\x68\x00\x72\x00\x65\
-\x00\x6e\x00\x2c\x00\x20\x00\x64\x00\x69\x00\x65\x00\x20\x00\x64\
-\x00\x75\x00\x20\x00\x66\x00\xfc\x00\x72\x00\x20\x00\x64\x00\x65\
-\x00\x6e\x00\x20\x00\x65\x00\x72\x00\x73\x00\x74\x00\x65\x00\x6e\
-\x00\x20\x00\x53\x00\x74\x00\x61\x00\x72\x00\x74\x00\x20\x00\x62\
-\x00\x65\x00\x6e\x00\xf6\x00\x74\x00\x69\x00\x67\x00\x73\x00\x74\
-\x00\x2e\x00\x3c\x00\x62\x00\x72\x00\x3e\x00\x3c\x00\x62\x00\x72\
-\x00\x3e\x00\x57\x00\x65\x00\x6e\x00\x6e\x00\x20\x00\x64\x00\x75\
-\x00\x20\x00\x64\x00\x69\x00\x65\x00\x73\x00\x65\x00\x20\x00\x4b\
-\x00\x6f\x00\x6e\x00\x66\x00\x69\x00\x67\x00\x75\x00\x72\x00\x61\
-\x00\x74\x00\x69\x00\x6f\x00\x6e\x00\x65\x00\x6e\x00\x20\x00\x6a\
-\x00\x65\x00\x6d\x00\x61\x00\x6c\x00\x73\x00\x20\x00\xe4\x00\x6e\
-\x00\x64\x00\x65\x00\x72\x00\x6e\x00\x20\x00\x6d\x00\x75\x00\x73\
-\x00\x73\x00\x74\x00\x2c\x00\x20\x00\x66\x00\x69\x00\x6e\x00\x64\
-\x00\x65\x00\x73\x00\x74\x00\x20\x00\x64\x00\x75\x00\x20\x00\x64\
-\x00\x65\x00\x6e\x00\x20\x00\x41\x00\x73\x00\x73\x00\x69\x00\x73\
-\x00\x74\x00\x65\x00\x6e\x00\x74\x00\x65\x00\x6e\x00\x20\x00\x69\
-\x00\x6d\x00\x20\x00\x27\x00\x3c\x00\x69\x00\x3e\x00\x45\x00\x69\
-\x00\x6e\x00\x73\x00\x74\x00\x65\x00\x6c\x00\x6c\x00\x75\x00\x6e\
-\x00\x67\x00\x65\x00\x6e\x00\x3c\x00\x2f\x00\x69\x00\x3e\x00\x27\
-\x00\x2d\x00\x4d\x00\x65\x00\x6e\x00\xfc\x00\x20\x00\x64\x00\x65\
-\x00\x73\x00\x20\x00\x48\x00\x61\x00\x75\x00\x70\x00\x66\x00\x65\
-\x00\x6e\x00\x73\x00\x74\x00\x65\x00\x72\x00\x73\x00\x2e\x00\x3c\
-\x00\x62\x00\x72\x00\x3e\x00\x3c\x00\x62\x00\x72\x00\x3e\x00\x4d\
-\x00\xf6\x00\x63\x00\x68\x00\x74\x00\x65\x00\x73\x00\x74\x00\x20\
-\x00\x64\x00\x75\x00\x20\x00\x64\x00\x69\x00\x63\x00\x68\x00\x20\
-\x00\x66\x00\xfc\x00\x72\x00\x20\x00\x65\x00\x69\x00\x6e\x00\x65\
-\x00\x6e\x00\x20\x00\x6e\x00\x65\x00\x75\x00\x65\x00\x6e\x00\x20\
-\x00\x41\x00\x63\x00\x63\x00\x6f\x00\x75\x00\x6e\x00\x74\x00\x20\
-\x00\x3c\x00\x62\x00\x3e\x00\x61\x00\x6e\x00\x6d\x00\x65\x00\x6c\
-\x00\x64\x00\x65\x00\x6e\x00\x3c\x00\x2f\x00\x62\x00\x3e\x00\x20\
-\x00\x6f\x00\x64\x00\x65\x00\x72\x00\x20\x00\x6d\x00\x69\x00\x74\
-\x00\x20\x00\x65\x00\x69\x00\x6e\x00\x65\x00\x6d\x00\x20\x00\x62\
-\x00\x65\x00\x73\x00\x74\x00\x65\x00\x68\x00\x65\x00\x6e\x00\x64\
-\x00\x65\x00\x6e\x00\x20\x00\x55\x00\x73\x00\x65\x00\x72\x00\x6e\
-\x00\x61\x00\x6d\x00\x65\x00\x6e\x00\x20\x00\x3c\x00\x62\x00\x3e\
-\x00\x65\x00\x69\x00\x6e\x00\x6c\x00\x6f\x00\x67\x00\x67\x00\x65\
-\x00\x6e\x00\x3c\x00\x2f\x00\x62\x00\x3e\x00\x3f\x08\x00\x00\x00\
-\x00\x06\x00\x00\x01\x5d\x4e\x6f\x77\x20\x77\x65\x20\x77\x69\x6c\
-\x6c\x20\x67\x75\x69\x64\x65\x20\x79\x6f\x75\x20\x74\x68\x72\x6f\
-\x75\x67\x68\x20\x73\x6f\x6d\x65\x20\x63\x6f\x6e\x66\x69\x67\x75\
-\x72\x61\x74\x69\x6f\x6e\x20\x74\x68\x61\x74\x20\x69\x73\x20\x6e\
-\x65\x65\x64\x65\x64\x20\x62\x65\x66\x6f\x72\x65\x20\x79\x6f\x75\
-\x20\x63\x61\x6e\x20\x63\x6f\x6e\x6e\x65\x63\x74\x20\x66\x6f\x72\
-\x20\x74\x68\x65\x20\x66\x69\x72\x73\x74\x20\x74\x69\x6d\x65\x2e\
-\x3c\x62\x72\x3e\x3c\x62\x72\x3e\x49\x66\x20\x79\x6f\x75\x20\x65\
-\x76\x65\x72\x20\x6e\x65\x65\x64\x20\x74\x6f\x20\x6d\x6f\x64\x69\
-\x66\x79\x20\x74\x68\x65\x73\x65\x20\x6f\x70\x74\x69\x6f\x6e\x73\
-\x20\x61\x67\x61\x69\x6e\x2c\x20\x79\x6f\x75\x20\x63\x61\x6e\x20\
-\x66\x69\x6e\x64\x20\x74\x68\x65\x20\x77\x69\x7a\x61\x72\x64\x20\
-\x69\x6e\x20\x74\x68\x65\x20\x27\x3c\x69\x3e\x53\x65\x74\x74\x69\
-\x6e\x67\x73\x3c\x2f\x69\x3e\x27\x20\x6d\x65\x6e\x75\x20\x66\x72\
-\x6f\x6d\x20\x74\x68\x65\x20\x6d\x61\x69\x6e\x20\x77\x69\x6e\x64\
-\x6f\x77\x2e\x3c\x62\x72\x3e\x3c\x62\x72\x3e\x44\x6f\x20\x79\x6f\
-\x75\x20\x77\x61\x6e\x74\x20\x74\x6f\x20\x3c\x62\x3e\x73\x69\x67\
-\x6e\x20\x75\x70\x3c\x2f\x62\x3e\x20\x66\x6f\x72\x20\x61\x20\x6e\
-\x65\x77\x20\x61\x63\x63\x6f\x75\x6e\x74\x2c\x20\x6f\x72\x20\x3c\
-\x62\x3e\x6c\x6f\x67\x20\x69\x6e\x3c\x2f\x62\x3e\x20\x77\x69\x74\
-\x68\x20\x61\x6e\x20\x61\x6c\x72\x65\x61\x64\x79\x20\x65\x78\x69\
-\x73\x74\x69\x6e\x67\x20\x75\x73\x65\x72\x6e\x61\x6d\x65\x3f\x3c\
-\x62\x72\x3e\x07\x00\x00\x00\x09\x49\x6e\x74\x72\x6f\x50\x61\x67\
-\x65\x01\x03\x00\x00\x00\x42\x00\x46\x00\xfc\x00\x72\x00\x20\x00\
-\x65\x00\x69\x00\x6e\x00\x65\x00\x6e\x00\x20\x00\x6e\x00\x65\x00\
-\x75\x00\x65\x00\x6e\x00\x20\x00\x41\x00\x63\x00\x63\x00\x6f\x00\
-\x75\x00\x6e\x00\x74\x00\x20\x00\x61\x00\x6e\x00\x6d\x00\x65\x00\
-\x6c\x00\x64\x00\x65\x00\x6e\x00\x2e\x08\x00\x00\x00\x00\x06\x00\
-\x00\x00\x1a\x53\x69\x67\x6e\x20\x75\x70\x20\x66\x6f\x72\x20\x61\
-\x20\x6e\x65\x77\x20\x61\x63\x63\x6f\x75\x6e\x74\x2e\x07\x00\x00\
-\x00\x09\x49\x6e\x74\x72\x6f\x50\x61\x67\x65\x01\x03\x00\x00\x00\
-\x38\x00\x41\x00\x75\x00\x74\x00\x68\x00\x65\x00\x6e\x00\x74\x00\
-\x69\x00\x66\x00\x69\x00\x7a\x00\x69\x00\x65\x00\x72\x00\x75\x00\
-\x6e\x00\x67\x00\x73\x00\x66\x00\x65\x00\x68\x00\x6c\x00\x65\x00\
-\x72\x00\x3a\x00\x20\x00\x25\x00\x73\x08\x00\x00\x00\x00\x06\x00\
-\x00\x00\x18\x41\x75\x74\x68\x65\x6e\x74\x69\x63\x61\x74\x69\x6f\
-\x6e\x20\x65\x72\x72\x6f\x72\x3a\x20\x25\x73\x07\x00\x00\x00\x09\
-\x4c\x6f\x67\x49\x6e\x50\x61\x67\x65\x01\x03\x00\x00\x00\x2a\x00\
-\x41\x00\x6e\x00\x6d\x00\x65\x00\x6c\x00\x64\x00\x65\x00\x64\x00\
-\x61\x00\x74\x00\x65\x00\x6e\x00\x20\x00\x6b\x00\x6f\x00\x72\x00\
-\x72\x00\x65\x00\x6b\x00\x74\x00\x2e\x08\x00\x00\x00\x00\x06\x00\
-\x00\x00\x16\x43\x72\x65\x64\x65\x6e\x74\x69\x61\x6c\x73\x20\x76\
-\x61\x6c\x69\x64\x61\x74\x65\x64\x2e\x07\x00\x00\x00\x09\x4c\x6f\
-\x67\x49\x6e\x50\x61\x67\x65\x01\x03\x00\x00\x00\x34\x00\x41\x00\
-\x75\x00\x66\x00\x6c\x00\xf6\x00\x73\x00\x65\x00\x6e\x00\x20\x00\
-\x64\x00\x65\x00\x73\x00\x20\x00\x44\x00\x6f\x00\x6d\x00\x61\x00\
-\x69\x00\x6e\x00\x2d\x00\x4e\x00\x61\x00\x6d\x00\x65\x00\x6e\x00\
-\x73\x08\x00\x00\x00\x00\x06\x00\x00\x00\x15\x52\x65\x73\x6f\x6c\
-\x76\x69\x6e\x67\x20\x64\x6f\x6d\x61\x69\x6e\x20\x6e\x61\x6d\x65\
-\x07\x00\x00\x00\x09\x4c\x6f\x67\x49\x6e\x50\x61\x67\x65\x01\x03\
-\x00\x00\x00\x6a\x00\x44\x00\x65\x00\x72\x00\x20\x00\x55\x00\x73\
-\x00\x65\x00\x72\x00\x6e\x00\x61\x00\x6d\x00\x65\x00\x20\x00\x6d\
-\x00\x75\x00\x73\x00\x73\x00\x20\x00\x69\x00\x6e\x00\x20\x00\x64\
-\x00\x65\x00\x72\x00\x20\x00\x46\x00\x6f\x00\x72\x00\x6d\x00\x20\
-\x00\x75\x00\x73\x00\x65\x00\x72\x00\x6e\x00\x61\x00\x6d\x00\x65\
-\x00\x40\x00\x70\x00\x72\x00\x6f\x00\x76\x00\x69\x00\x64\x00\x65\
-\x00\x72\x00\x20\x00\x73\x00\x65\x00\x69\x00\x6e\x00\x2e\x08\x00\
-\x00\x00\x00\x06\x00\x00\x00\x2f\x55\x73\x65\x72\x6e\x61\x6d\x65\
-\x20\x6d\x75\x73\x74\x20\x62\x65\x20\x69\x6e\x20\x74\x68\x65\x20\
-\x75\x73\x65\x72\x6e\x61\x6d\x65\x40\x70\x72\x6f\x76\x69\x64\x65\
-\x72\x20\x66\x6f\x72\x6d\x2e\x07\x00\x00\x00\x09\x4c\x6f\x67\x49\
-\x6e\x50\x61\x67\x65\x01\x03\x00\x00\x00\x1a\x00\x50\x00\x72\x00\
-\x6f\x00\x76\x00\x69\x00\x64\x00\x65\x00\x72\x00\x2d\x00\x69\x00\
-\x6e\x00\x66\x00\x6f\x08\x00\x00\x00\x00\x06\x00\x00\x00\x0d\x50\
-\x72\x6f\x76\x69\x64\x65\x72\x20\x49\x6e\x66\x6f\x07\x00\x00\x00\
-\x10\x50\x72\x6f\x76\x69\x64\x65\x72\x49\x6e\x66\x6f\x50\x61\x67\
-\x65\x01\x03\x00\x00\x00\x3e\x00\x44\x00\x61\x00\x73\x00\x20\x00\
-\x69\x00\x73\x00\x74\x00\x2c\x00\x20\x00\x77\x00\x61\x00\x73\x00\
-\x20\x00\x64\x00\x65\x00\x72\x00\x20\x00\x50\x00\x72\x00\x6f\x00\
-\x76\x00\x69\x00\x64\x00\x65\x00\x72\x00\x20\x00\x73\x00\x61\x00\
-\x67\x00\x74\x00\x2e\x08\x00\x00\x00\x00\x06\x00\x00\x00\x1b\x54\
-\x68\x69\x73\x20\x69\x73\x20\x77\x68\x61\x74\x20\x70\x72\x6f\x76\
-\x69\x64\x65\x72\x20\x73\x61\x79\x73\x2e\x07\x00\x00\x00\x10\x50\
-\x72\x6f\x76\x69\x64\x65\x72\x49\x6e\x66\x6f\x50\x61\x67\x65\x01\
-\x03\x00\x00\x00\x30\x00\xdc\x00\x62\x00\x65\x00\x72\x00\x70\x00\
-\x72\x00\xfc\x00\x66\x00\x65\x00\x20\x00\x43\x00\x41\x00\x2d\x00\
-\x46\x00\x69\x00\x6e\x00\x67\x00\x65\x00\x72\x00\x70\x00\x72\x00\
-\x69\x00\x6e\x00\x74\x08\x00\x00\x00\x00\x06\x00\x00\x00\x17\x43\
-\x68\x65\x63\x6b\x69\x6e\x67\x20\x43\x41\x20\x66\x69\x6e\x67\x65\
-\x72\x70\x72\x69\x6e\x74\x07\x00\x00\x00\x1b\x50\x72\x6f\x76\x69\
-\x64\x65\x72\x53\x65\x74\x75\x70\x56\x61\x6c\x69\x64\x61\x74\x69\
-\x6f\x6e\x50\x61\x67\x65\x01\x03\x00\x00\x00\x2e\x00\x46\x00\xfc\
-\x00\x68\x00\x72\x00\x65\x00\x20\x00\x61\x00\x75\x00\x74\x00\x6f\
-\x00\x63\x00\x6f\x00\x6e\x00\x66\x00\x69\x00\x67\x00\x20\x00\x64\
-\x00\x75\x00\x72\x00\x63\x00\x68\x00\x2e\x08\x00\x00\x00\x00\x06\
-\x00\x00\x00\x11\x44\x6f\x69\x6e\x67\x20\x61\x75\x74\x6f\x63\x6f\
-\x6e\x66\x69\x67\x2e\x07\x00\x00\x00\x1b\x50\x72\x6f\x76\x69\x64\
-\x65\x72\x53\x65\x74\x75\x70\x56\x61\x6c\x69\x64\x61\x74\x69\x6f\
-\x6e\x50\x61\x67\x65\x01\x03\x00\x00\x00\x24\x00\x48\x00\x6f\x00\
-\x6c\x00\x65\x00\x20\x00\x43\x00\x41\x00\x2d\x00\x5a\x00\x65\x00\
-\x72\x00\x74\x00\x69\x00\x66\x00\x69\x00\x6b\x00\x61\x00\x74\x08\
-\x00\x00\x00\x00\x06\x00\x00\x00\x17\x46\x65\x74\x63\x68\x69\x6e\
-\x67\x20\x43\x41\x20\x63\x65\x72\x74\x69\x66\x69\x63\x61\x74\x65\
-\x07\x00\x00\x00\x1b\x50\x72\x6f\x76\x69\x64\x65\x72\x53\x65\x74\
-\x75\x70\x56\x61\x6c\x69\x64\x61\x74\x69\x6f\x6e\x50\x61\x67\x65\
-\x01\x03\x00\x00\x00\x1c\x00\x50\x00\x72\x00\x6f\x00\x76\x00\x69\
-\x00\x64\x00\x65\x00\x72\x00\x2d\x00\x53\x00\x65\x00\x74\x00\x75\
-\x00\x70\x08\x00\x00\x00\x00\x06\x00\x00\x00\x0e\x50\x72\x6f\x76\
-\x69\x64\x65\x72\x20\x73\x65\x74\x75\x70\x07\x00\x00\x00\x1b\x50\
-\x72\x6f\x76\x69\x64\x65\x72\x53\x65\x74\x75\x70\x56\x61\x6c\x69\
-\x64\x61\x74\x69\x6f\x6e\x50\x61\x67\x65\x01\x03\x00\x00\x00\x30\
-\x00\xdc\x00\x62\x00\x65\x00\x72\x00\x70\x00\x72\x00\xfc\x00\x66\
-\x00\x65\x00\x20\x00\x41\x00\x50\x00\x49\x00\x2d\x00\x5a\x00\x65\
-\x00\x72\x00\x74\x00\x69\x00\x66\x00\x69\x00\x6b\x00\x61\x00\x74\
-\x08\x00\x00\x00\x00\x06\x00\x00\x00\x1a\x56\x61\x6c\x69\x64\x61\
-\x74\x69\x6e\x67\x20\x61\x70\x69\x20\x63\x65\x72\x74\x69\x66\x69\
-\x63\x61\x74\x65\x07\x00\x00\x00\x1b\x50\x72\x6f\x76\x69\x64\x65\
-\x72\x53\x65\x74\x75\x70\x56\x61\x6c\x69\x64\x61\x74\x69\x6f\x6e\
-\x50\x61\x67\x65\x01\x03\x00\x00\x00\x50\x00\x4b\x00\x6f\x00\x6e\
-\x00\x6e\x00\x74\x00\x65\x00\x20\x00\x6e\x00\x69\x00\x63\x00\x68\
-\x00\x74\x00\x20\x00\x72\x00\x65\x00\x67\x00\x69\x00\x73\x00\x74\
-\x00\x72\x00\x69\x00\x65\x00\x72\x00\x65\x00\x6e\x00\x20\x00\x28\
-\x00\x62\x00\x61\x00\x64\x00\x20\x00\x72\x00\x65\x00\x73\x00\x70\
-\x00\x6f\x00\x6e\x00\x73\x00\x65\x00\x29\x08\x00\x00\x00\x00\x06\
-\x00\x00\x00\x21\x43\x6f\x75\x6c\x64\x20\x6e\x6f\x74\x20\x72\x65\
-\x67\x69\x73\x74\x65\x72\x20\x28\x62\x61\x64\x20\x72\x65\x73\x70\
-\x6f\x6e\x73\x65\x29\x07\x00\x00\x00\x10\x52\x65\x67\x69\x73\x74\
-\x65\x72\x55\x73\x65\x72\x50\x61\x67\x65\x01\x03\x00\x00\x00\x4e\
-\x00\x56\x00\x65\x00\x72\x00\x62\x00\x69\x00\x6e\x00\x64\x00\x75\
-\x00\x6e\x00\x67\x00\x73\x00\x66\x00\x65\x00\x68\x00\x6c\x00\x65\
-\x00\x72\x00\x20\x00\x7a\x00\x75\x00\x20\x00\x50\x00\x72\x00\x6f\
-\x00\x76\x00\x69\x00\x64\x00\x65\x00\x72\x00\x20\x00\x28\x00\x63\
-\x00\x6f\x00\x6e\x00\x6e\x00\x65\x00\x72\x00\x72\x00\x29\x08\x00\
-\x00\x00\x00\x06\x00\x00\x00\x27\x45\x72\x72\x6f\x72\x20\x43\x6f\
-\x6e\x6e\x65\x63\x74\x69\x6e\x67\x20\x74\x6f\x20\x70\x72\x6f\x76\
-\x69\x64\x65\x72\x20\x28\x63\x6f\x6e\x6e\x65\x72\x72\x29\x2e\x07\
-\x00\x00\x00\x10\x52\x65\x67\x69\x73\x74\x65\x72\x55\x73\x65\x72\
-\x50\x61\x67\x65\x01\x03\x00\x00\x00\x4e\x00\x56\x00\x65\x00\x72\
-\x00\x62\x00\x69\x00\x6e\x00\x64\x00\x75\x00\x6e\x00\x67\x00\x73\
-\x00\x66\x00\x65\x00\x68\x00\x6c\x00\x65\x00\x72\x00\x20\x00\x7a\
-\x00\x75\x00\x20\x00\x50\x00\x72\x00\x6f\x00\x76\x00\x69\x00\x64\
-\x00\x65\x00\x72\x00\x20\x00\x28\x00\x74\x00\x69\x00\x6d\x00\x65\
-\x00\x6f\x00\x75\x00\x74\x00\x29\x08\x00\x00\x00\x00\x06\x00\x00\
-\x00\x26\x45\x72\x72\x6f\x72\x20\x63\x6f\x6e\x6e\x65\x63\x74\x69\
-\x6e\x67\x20\x74\x6f\x20\x70\x72\x6f\x76\x69\x64\x65\x72\x20\x28\
-\x74\x69\x6d\x65\x6f\x75\x74\x29\x07\x00\x00\x00\x10\x52\x65\x67\
-\x69\x73\x74\x65\x72\x55\x73\x65\x72\x50\x61\x67\x65\x01\x03\x00\
-\x00\x00\x4a\x00\x46\x00\x65\x00\x68\x00\x6c\x00\x65\x00\x72\x00\
-\x20\x00\x77\x00\xe4\x00\x68\x00\x72\x00\x65\x00\x6e\x00\x64\x00\
-\x20\x00\x64\x00\x65\x00\x72\x00\x20\x00\x52\x00\x65\x00\x67\x00\
-\x69\x00\x73\x00\x74\x00\x72\x00\x69\x00\x65\x00\x72\x00\x75\x00\
-\x6e\x00\x67\x00\x20\x00\x28\x00\x25\x00\x73\x00\x29\x08\x00\x00\
-\x00\x00\x06\x00\x00\x00\x1e\x45\x72\x72\x6f\x72\x20\x64\x75\x72\
-\x69\x6e\x67\x20\x72\x65\x67\x69\x73\x74\x72\x61\x74\x69\x6f\x6e\
-\x20\x28\x25\x73\x29\x07\x00\x00\x00\x10\x52\x65\x67\x69\x73\x74\
-\x65\x72\x55\x73\x65\x72\x50\x61\x67\x65\x01\x03\x00\x00\x00\x3c\
-\x00\x50\x00\x61\x00\x73\x00\x73\x00\x77\x00\x6f\x00\x72\x00\x74\
-\x00\x20\x00\x73\x00\x74\x00\x69\x00\x6d\x00\x6d\x00\x74\x00\x20\
-\x00\x6e\x00\x69\x00\x63\x00\x68\x00\x74\x00\x20\x00\xfc\x00\x62\
-\x00\x65\x00\x72\x00\x69\x00\x65\x00\x6e\x00\x2e\x08\x00\x00\x00\
-\x00\x06\x00\x00\x00\x19\x50\x61\x73\x73\x77\x6f\x72\x64\x20\x64\
-\x6f\x65\x73\x20\x6e\x6f\x74\x20\x6d\x61\x74\x63\x68\x2e\x2e\x07\
-\x00\x00\x00\x10\x52\x65\x67\x69\x73\x74\x65\x72\x55\x73\x65\x72\
-\x50\x61\x67\x65\x01\x03\x00\x00\x00\x26\x00\x50\x00\x61\x00\x73\
-\x00\x73\x00\x77\x00\x6f\x00\x72\x00\x74\x00\x20\x00\x7a\x00\x75\
-\x00\x20\x00\x73\x00\x69\x00\x6d\x00\x70\x00\x65\x00\x6c\x00\x2e\
-\x08\x00\x00\x00\x00\x06\x00\x00\x00\x15\x50\x61\x73\x73\x77\x6f\
-\x72\x64\x20\x74\x6f\x6f\x20\x6f\x62\x76\x69\x6f\x75\x73\x2e\x07\
-\x00\x00\x00\x10\x52\x65\x67\x69\x73\x74\x65\x72\x55\x73\x65\x72\
-\x50\x61\x67\x65\x01\x03\x00\x00\x00\x20\x00\x50\x00\x61\x00\x73\
-\x00\x73\x00\x77\x00\x6f\x00\x72\x00\x74\x00\x20\x00\x7a\x00\x75\
-\x00\x20\x00\x6b\x00\x75\x00\x72\x00\x7a\x08\x00\x00\x00\x00\x06\
-\x00\x00\x00\x13\x50\x61\x73\x73\x77\x6f\x72\x64\x20\x74\x6f\x6f\
-\x20\x73\x68\x6f\x72\x74\x2e\x07\x00\x00\x00\x10\x52\x65\x67\x69\
-\x73\x74\x65\x72\x55\x73\x65\x72\x50\x61\x67\x65\x01\x03\x00\x00\
-\x00\x58\x00\x52\x00\x65\x00\x67\x00\x69\x00\x73\x00\x74\x00\x72\
-\x00\x69\x00\x65\x00\x72\x00\x65\x00\x20\x00\x65\x00\x69\x00\x6e\
-\x00\x65\x00\x6e\x00\x20\x00\x6e\x00\x65\x00\x75\x00\x65\x00\x6e\
-\x00\x20\x00\x55\x00\x73\x00\x65\x00\x72\x00\x20\x00\x62\x00\x65\
-\x00\x69\x00\x20\x00\x50\x00\x72\x00\x6f\x00\x76\x00\x69\x00\x64\
-\x00\x65\x00\x72\x00\x20\x00\x25\x00\x73\x08\x00\x00\x00\x00\x06\
-\x00\x00\x00\x25\x52\x65\x67\x69\x73\x74\x65\x72\x20\x61\x20\x6e\
-\x65\x77\x20\x75\x73\x65\x72\x20\x77\x69\x74\x68\x20\x70\x72\x6f\
-\x76\x69\x64\x65\x72\x20\x25\x73\x2e\x07\x00\x00\x00\x10\x52\x65\
-\x67\x69\x73\x74\x65\x72\x55\x73\x65\x72\x50\x61\x67\x65\x01\x03\
-\x00\x00\x00\x34\x00\x52\x00\x65\x00\x67\x00\x69\x00\x73\x00\x74\
-\x00\x72\x00\x69\x00\x65\x00\x72\x00\x75\x00\x6e\x00\x67\x00\x20\
-\x00\x65\x00\x72\x00\x66\x00\x6f\x00\x6c\x00\x67\x00\x72\x00\x65\
-\x00\x69\x00\x63\x00\x68\x00\x21\x08\x00\x00\x00\x00\x06\x00\x00\
-\x00\x17\x52\x65\x67\x69\x73\x74\x72\x61\x74\x69\x6f\x6e\x20\x73\
-\x75\x63\x63\x65\x65\x64\x65\x64\x21\x07\x00\x00\x00\x10\x52\x65\
-\x67\x69\x73\x74\x65\x72\x55\x73\x65\x72\x50\x61\x67\x65\x01\x03\
-\x00\x00\x00\x10\x00\x41\x00\x6e\x00\x6d\x00\x65\x00\x6c\x00\x64\
-\x00\x65\x00\x6e\x08\x00\x00\x00\x00\x06\x00\x00\x00\x07\x53\x69\
-\x67\x6e\x20\x55\x70\x07\x00\x00\x00\x10\x52\x65\x67\x69\x73\x74\
-\x65\x72\x55\x73\x65\x72\x50\x61\x67\x65\x01\x03\x00\x00\x00\x32\
-\x00\x55\x00\x73\x00\x65\x00\x72\x00\x6e\x00\x61\x00\x6d\x00\x65\
-\x00\x20\x00\x6e\x00\x69\x00\x63\x00\x68\x00\x74\x00\x20\x00\x76\
-\x00\x65\x00\x72\x00\x66\x00\xfc\x00\x67\x00\x62\x00\x61\x00\x72\
-\x00\x2e\x08\x00\x00\x00\x00\x06\x00\x00\x00\x17\x55\x73\x65\x72\
-\x6e\x61\x6d\x65\x20\x6e\x6f\x74\x20\x61\x76\x61\x69\x6c\x61\x62\
-\x6c\x65\x2e\x07\x00\x00\x00\x10\x52\x65\x67\x69\x73\x74\x65\x72\
-\x55\x73\x65\x72\x50\x61\x67\x65\x01\x03\x00\x00\x00\x38\x00\x41\
-\x00\x75\x00\x74\x00\x68\x00\x65\x00\x6e\x00\x74\x00\x69\x00\x66\
-\x00\x69\x00\x7a\x00\x69\x00\x65\x00\x72\x00\x75\x00\x6e\x00\x67\
-\x00\x73\x00\x66\x00\x65\x00\x68\x00\x6c\x00\x65\x00\x72\x00\x3a\
-\x00\x20\x00\x25\x00\x73\x08\x00\x00\x00\x00\x06\x00\x00\x00\x18\
-\x41\x75\x74\x68\x65\x6e\x74\x69\x63\x61\x74\x69\x6f\x6e\x20\x65\
-\x72\x72\x6f\x72\x3a\x20\x25\x73\x07\x00\x00\x00\x1a\x52\x65\x67\
-\x69\x73\x74\x65\x72\x55\x73\x65\x72\x56\x61\x6c\x69\x64\x61\x74\
-\x69\x6f\x6e\x50\x61\x67\x65\x01\x03\x00\x00\x00\x26\x00\x48\x00\
-\x6f\x00\x6c\x00\x65\x00\x20\x00\x45\x00\x49\x00\x50\x00\x2d\x00\
-\x5a\x00\x65\x00\x72\x00\x74\x00\x69\x00\x66\x00\x69\x00\x6b\x00\
-\x61\x00\x74\x08\x00\x00\x00\x00\x06\x00\x00\x00\x18\x46\x65\x74\
-\x63\x68\x69\x6e\x67\x20\x65\x69\x70\x20\x63\x65\x72\x74\x69\x66\
-\x69\x63\x61\x74\x65\x07\x00\x00\x00\x1a\x52\x65\x67\x69\x73\x74\
-\x65\x72\x55\x73\x65\x72\x56\x61\x6c\x69\x64\x61\x74\x69\x6f\x6e\
-\x50\x61\x67\x65\x01\x03\x00\x00\x00\x3c\x00\x48\x00\x6f\x00\x6c\
-\x00\x65\x00\x20\x00\x50\x00\x72\x00\x6f\x00\x76\x00\x69\x00\x64\
-\x00\x65\x00\x72\x00\x2d\x00\x4b\x00\x6f\x00\x6e\x00\x66\x00\x69\
-\x00\x67\x00\x75\x00\x72\x00\x61\x00\x74\x00\x69\x00\x6f\x00\x6e\
-\x00\x2e\x00\x2e\x00\x2e\x08\x00\x00\x00\x00\x06\x00\x00\x00\x1b\
-\x46\x65\x74\x63\x68\x69\x6e\x67\x20\x70\x72\x6f\x76\x69\x64\x65\
-\x72\x20\x63\x6f\x6e\x66\x69\x67\x2e\x2e\x2e\x07\x00\x00\x00\x1a\
-\x52\x65\x67\x69\x73\x74\x65\x72\x55\x73\x65\x72\x56\x61\x6c\x69\
-\x64\x61\x74\x69\x6f\x6e\x50\x61\x67\x65\x01\x03\x00\x00\x00\x2c\
-\x00\x5a\x00\x65\x00\x72\x00\x74\x00\x69\x00\x66\x00\x69\x00\x6b\
-\x00\x61\x00\x74\x00\x73\x00\xfc\x00\x62\x00\x65\x00\x72\x00\x70\
-\x00\x72\x00\xfc\x00\x66\x00\x75\x00\x6e\x00\x67\x08\x00\x00\x00\
-\x00\x06\x00\x00\x00\x16\x43\x65\x72\x74\x69\x66\x69\x63\x61\x74\
-\x65\x20\x76\x61\x6c\x69\x64\x61\x74\x69\x6f\x6e\x07\x00\x00\x00\
-\x12\x53\x65\x6c\x65\x63\x74\x50\x72\x6f\x76\x69\x64\x65\x72\x50\
-\x61\x67\x65\x01\x03\x00\x00\x00\x72\x00\x4b\x00\x6f\x00\x6e\x00\
-\x6e\x00\x74\x00\x65\x00\x20\x00\x50\x00\x72\x00\x6f\x00\x76\x00\
-\x69\x00\x64\x00\x65\x00\x72\x00\x2d\x00\x49\x00\x6e\x00\x66\x00\
-\x6f\x00\x20\x00\x6e\x00\x69\x00\x63\x00\x68\x00\x74\x00\x20\x00\
-\x68\x00\x65\x00\x72\x00\x75\x00\x6e\x00\x74\x00\x65\x00\x72\x00\
-\x6c\x00\x61\x00\x64\x00\x65\x00\x6e\x00\x20\x00\x28\x00\x72\x00\
-\x65\x00\x66\x00\x75\x00\x73\x00\x65\x00\x64\x00\x20\x00\x63\x00\
-\x6f\x00\x6e\x00\x6e\x00\x2e\x00\x29\x00\x2e\x08\x00\x00\x00\x00\
-\x06\x00\x00\x00\x31\x43\x6f\x75\x6c\x64\x20\x6e\x6f\x74\x20\x64\
-\x6f\x77\x6e\x6c\x6f\x61\x64\x20\x70\x72\x6f\x76\x69\x64\x65\x72\
-\x20\x69\x6e\x66\x6f\x20\x28\x72\x65\x66\x75\x73\x65\x64\x20\x63\
-\x6f\x6e\x6e\x2e\x29\x2e\x07\x00\x00\x00\x12\x53\x65\x6c\x65\x63\
-\x74\x50\x72\x6f\x76\x69\x64\x65\x72\x50\x61\x67\x65\x01\x03\x00\
-\x00\x00\x5e\x00\x4b\x00\x6f\x00\x6e\x00\x6e\x00\x74\x00\x65\x00\
-\x20\x00\x6b\x00\x65\x00\x69\x00\x6e\x00\x65\x00\x20\x00\x49\x00\
-\x6e\x00\x66\x00\x6f\x00\x72\x00\x6d\x00\x61\x00\x74\x00\x69\x00\
-\x6f\x00\x6e\x00\x20\x00\x76\x00\x6f\x00\x6d\x00\x20\x00\x50\x00\
-\x72\x00\x6f\x00\x76\x00\x69\x00\x64\x00\x65\x00\x72\x00\x20\x00\
-\x62\x00\x65\x00\x6b\x00\x6f\x00\x6d\x00\x6d\x00\x65\x00\x6e\x00\
-\x2e\x08\x00\x00\x00\x00\x06\x00\x00\x00\x21\x43\x6f\x75\x6c\x64\
-\x20\x6e\x6f\x74\x20\x67\x65\x74\x20\x69\x6e\x66\x6f\x20\x66\x72\
-\x6f\x6d\x20\x70\x72\x6f\x76\x69\x64\x65\x72\x2e\x07\x00\x00\x00\
-\x12\x53\x65\x6c\x65\x63\x74\x50\x72\x6f\x76\x69\x64\x65\x72\x50\
-\x61\x67\x65\x01\x03\x00\x00\x00\x20\x00\x47\x00\x69\x00\x62\x00\
-\x20\x00\x50\x00\x72\x00\x6f\x00\x76\x00\x69\x00\x64\x00\x65\x00\
-\x72\x00\x20\x00\x65\x00\x69\x00\x6e\x08\x00\x00\x00\x00\x06\x00\
-\x00\x00\x0e\x45\x6e\x74\x65\x72\x20\x50\x72\x6f\x76\x69\x64\x65\
-\x72\x07\x00\x00\x00\x12\x53\x65\x6c\x65\x63\x74\x50\x72\x6f\x76\
-\x69\x64\x65\x72\x50\x61\x67\x65\x01\x03\x00\x00\x00\xa6\x00\x42\
-\x00\x69\x00\x74\x00\x74\x00\x65\x00\x20\x00\x67\x00\x69\x00\x62\
-\x00\x20\x00\x64\x00\x69\x00\x65\x00\x20\x00\x44\x00\x6f\x00\x6d\
-\x00\x61\x00\x69\x00\x6e\x00\x20\x00\x64\x00\x65\x00\x73\x00\x20\
-\x00\x50\x00\x72\x00\x6f\x00\x76\x00\x69\x00\x64\x00\x65\x00\x72\
-\x00\x73\x00\x20\x00\x61\x00\x6e\x00\x2c\x00\x20\x00\x64\x00\x65\
-\x00\x6e\x00\x20\x00\x64\x00\x75\x00\x20\x00\x66\x00\xfc\x00\x72\
-\x00\x20\x00\x64\x00\x65\x00\x69\x00\x6e\x00\x65\x00\x20\x00\x56\
-\x00\x65\x00\x72\x00\x62\x00\x69\x00\x6e\x00\x64\x00\x75\x00\x6e\
-\x00\x67\x00\x20\x00\x6e\x00\x75\x00\x74\x00\x7a\x00\x65\x00\x6e\
-\x00\x20\x00\x6d\x00\xf6\x00\x63\x00\x68\x00\x74\x00\x65\x00\x73\
-\x00\x74\x00\x2e\x08\x00\x00\x00\x00\x06\x00\x00\x00\x4c\x50\x6c\
-\x65\x61\x73\x65\x20\x65\x6e\x74\x65\x72\x20\x74\x68\x65\x20\x64\
-\x6f\x6d\x61\x69\x6e\x20\x6f\x66\x20\x74\x68\x65\x20\x70\x72\x6f\
-\x76\x69\x64\x65\x72\x20\x79\x6f\x75\x20\x77\x61\x6e\x74\x20\x74\
-\x6f\x20\x75\x73\x65\x20\x66\x6f\x72\x20\x79\x6f\x75\x72\x20\x63\
-\x6f\x6e\x6e\x65\x63\x74\x69\x6f\x6e\x2e\x07\x00\x00\x00\x12\x53\
-\x65\x6c\x65\x63\x74\x50\x72\x6f\x76\x69\x64\x65\x72\x50\x61\x67\
-\x65\x01\x03\x00\x00\x00\x60\x00\x53\x00\x65\x00\x72\x00\x76\x00\
-\x65\x00\x72\x00\x2d\x00\x5a\x00\x65\x00\x72\x00\x74\x00\x69\x00\
-\x66\x00\x69\x00\x6b\x00\x61\x00\x74\x00\x20\x00\x6b\x00\x6f\x00\
-\x6e\x00\x6e\x00\x74\x00\x65\x00\x20\x00\x6e\x00\x69\x00\x63\x00\
-\x68\x00\x74\x00\x20\x00\x62\x00\x65\x00\x73\x00\x74\x00\xe4\x00\
-\x74\x00\x69\x00\x67\x00\x74\x00\x20\x00\x77\x00\x65\x00\x72\x00\
-\x64\x00\x65\x00\x6e\x00\x2e\x08\x00\x00\x00\x00\x06\x00\x00\x00\
-\x29\x53\x65\x72\x76\x65\x72\x20\x63\x65\x72\x74\x69\x66\x69\x63\
-\x61\x74\x65\x20\x63\x6f\x75\x6c\x64\x20\x6e\x6f\x74\x20\x62\x65\
-\x20\x76\x65\x72\x69\x66\x69\x65\x64\x2e\x07\x00\x00\x00\x12\x53\
-\x65\x6c\x65\x63\x74\x50\x72\x6f\x76\x69\x64\x65\x72\x50\x61\x67\
-\x65\x01\x03\x00\x00\x00\x22\x00\x50\x00\x72\x00\xfc\x00\x66\x00\
-\x65\x00\x20\x00\x44\x00\x6f\x00\x6d\x00\x61\x00\x69\x00\x6e\x00\
-\x2d\x00\x4e\x00\x61\x00\x6d\x00\x65\x08\x00\x00\x00\x00\x06\x00\
-\x00\x00\x14\x63\x68\x65\x63\x6b\x69\x6e\x67\x20\x64\x6f\x6d\x61\
-\x69\x6e\x20\x6e\x61\x6d\x65\x07\x00\x00\x00\x12\x53\x65\x6c\x65\
-\x63\x74\x50\x72\x6f\x76\x69\x64\x65\x72\x50\x61\x67\x65\x01\x03\
-\x00\x00\x00\x2c\x00\x50\x00\x72\x00\xfc\x00\x66\x00\x65\x00\x20\
-\x00\x48\x00\x54\x00\x54\x00\x50\x00\x53\x00\x2d\x00\x56\x00\x65\
-\x00\x72\x00\x62\x00\x69\x00\x6e\x00\x64\x00\x75\x00\x6e\x00\x67\
-\x08\x00\x00\x00\x00\x06\x00\x00\x00\x19\x63\x68\x65\x63\x6b\x69\
-\x6e\x67\x20\x68\x74\x74\x70\x73\x20\x63\x6f\x6e\x6e\x65\x63\x74\
-\x69\x6f\x6e\x07\x00\x00\x00\x12\x53\x65\x6c\x65\x63\x74\x50\x72\
-\x6f\x76\x69\x64\x65\x72\x50\x61\x67\x65\x01\x03\x00\x00\x00\x24\
-\x00\x48\x00\x6f\x00\x6c\x00\x65\x00\x20\x00\x50\x00\x72\x00\x6f\
-\x00\x76\x00\x69\x00\x64\x00\x65\x00\x72\x00\x2d\x00\x49\x00\x6e\
-\x00\x66\x00\x6f\x08\x00\x00\x00\x00\x06\x00\x00\x00\x16\x66\x65\
-\x74\x63\x68\x69\x6e\x67\x20\x70\x72\x6f\x76\x69\x64\x65\x72\x20\
-\x69\x6e\x66\x6f\x07\x00\x00\x00\x12\x53\x65\x6c\x65\x63\x74\x50\
-\x72\x6f\x76\x69\x64\x65\x72\x50\x61\x67\x65\x01\x88\x00\x00\x00\
-\x02\x01\x01\
-\x00\x00\x18\x32\
-\x3c\
-\xb8\x64\x18\xca\xef\x9c\x95\xcd\x21\x1c\xbf\x60\xa1\xbd\xdd\x42\
-\x00\x00\x01\x38\x00\x8f\x9b\xbe\x00\x00\x14\x83\x01\x23\x92\xe5\
-\x00\x00\x10\x3d\x01\x87\x64\x8e\x00\x00\x08\x7a\x01\xa8\xbe\x7e\
-\x00\x00\x0e\x02\x02\x2c\xac\xe9\x00\x00\x0b\x8a\x02\x3a\xce\xbf\
-\x00\x00\x16\x62\x02\x6e\x0f\xe5\x00\x00\x08\xdd\x02\x87\x60\x9e\
-\x00\x00\x06\x6e\x02\xaa\x52\x6e\x00\x00\x07\x6b\x02\xf2\xe0\x59\
-\x00\x00\x0a\x5e\x03\xec\x70\x0e\x00\x00\x10\xb8\x04\xd4\x45\xee\
-\x00\x00\x0d\x24\x05\xb7\x8f\x59\x00\x00\x0c\x27\x06\x3e\x6a\x9e\
-\x00\x00\x05\x9f\x06\x40\xa8\x7e\x00\x00\x0a\xea\x06\xee\xff\x6e\
-\x00\x00\x13\x74\x08\x13\xe8\xae\x00\x00\x0c\xa6\x08\x7a\x64\xee\
-\x00\x00\x11\xc5\x08\xe6\x98\x33\x00\x00\x05\x35\x08\xe6\x98\x33\
-\x00\x00\x0f\xc2\x09\x5c\x35\xe1\x00\x00\x0e\xaa\x09\x74\x75\x4e\
-\x00\x00\x0d\x94\x09\x98\x34\x0e\x00\x00\x12\x89\x09\xd8\x1f\x95\
-\x00\x00\x15\x79\x09\xeb\x5c\xb1\x00\x00\x15\x35\x09\xfc\x2c\x8e\
-\x00\x00\x04\xc7\x09\xfe\x05\x90\x00\x00\x0f\x16\x0a\x74\xb8\x1e\
-\x00\x00\x00\xd6\x0a\xfd\x99\xfe\x00\x00\x00\x51\x0b\xd2\x4b\x3f\
-\x00\x00\x07\x15\x0c\x44\x41\xbe\x00\x00\x00\x00\x0c\xc0\x94\x05\
-\x00\x00\x09\xd6\x0d\x0d\x9d\xc5\x00\x00\x06\x01\x0d\x15\x34\x70\
-\x00\x00\x09\x62\x0e\x36\x15\x54\x00\x00\x07\xed\x0e\x7e\xf5\xee\
-\x00\x00\x0f\x5a\x0e\x91\x50\x3e\x00\x00\x15\xee\x0e\xc0\xbb\x72\
-\x00\x00\x13\x1b\x0f\x27\x0d\x6e\x00\x00\x11\x54\x69\x00\x00\x16\
-\xd9\x03\x00\x00\x00\x22\x00\x50\x00\x72\x00\x69\x00\x6d\x00\x65\
-\x00\x72\x00\x61\x00\x20\x00\x43\x00\x6f\x00\x6e\x00\x65\x00\x78\
-\x00\x69\x00\x6f\x00\x6e\x00\x2e\x08\x00\x00\x00\x00\x06\x00\x00\
-\x00\x11\x46\x69\x72\x73\x74\x20\x72\x75\x6e\x20\x77\x69\x7a\x61\
-\x72\x64\x2e\x07\x00\x00\x00\x09\x49\x6e\x74\x72\x6f\x50\x61\x67\
-\x65\x01\x03\x00\x00\x00\x4c\x00\x4c\x00\x6f\x00\x67\x00\x75\x00\
-\x65\x00\x61\x00\x72\x00\x6d\x00\x65\x00\x20\x00\x63\x00\x6f\x00\
-\x6e\x00\x20\x00\x75\x00\x6e\x00\x20\x00\x75\x00\x73\x00\x75\x00\
-\x61\x00\x72\x00\x69\x00\x6f\x00\x20\x00\x71\x00\x75\x00\x65\x00\
-\x20\x00\x79\x00\x61\x00\x20\x00\x74\x00\x65\x00\x6e\x00\x67\x00\
-\x6f\x00\x2e\x08\x00\x00\x00\x00\x06\x00\x00\x00\x1b\x4c\x6f\x67\
-\x20\x49\x6e\x20\x77\x69\x74\x68\x20\x6d\x79\x20\x63\x72\x65\x64\
-\x65\x6e\x74\x69\x61\x6c\x73\x2e\x07\x00\x00\x00\x09\x49\x6e\x74\
-\x72\x6f\x50\x61\x67\x65\x01\x03\x00\x00\x02\x76\x00\x56\x00\x61\
-\x00\x6d\x00\x6f\x00\x73\x00\x20\x00\x61\x00\x20\x00\x63\x00\x6f\
-\x00\x6e\x00\x66\x00\x69\x00\x67\x00\x75\x00\x72\x00\x61\x00\x72\
-\x00\x20\x00\x61\x00\x6c\x00\x67\x00\x75\x00\x6e\x00\x61\x00\x73\
-\x00\x20\x00\x63\x00\x6f\x00\x73\x00\x61\x00\x73\x00\x20\x00\x61\
-\x00\x6e\x00\x74\x00\x65\x00\x73\x00\x20\x00\x64\x00\x65\x00\x20\
-\x00\x71\x00\x75\x00\x65\x00\x20\x00\x74\x00\x65\x00\x20\x00\x70\
-\x00\x75\x00\x65\x00\x64\x00\x61\x00\x73\x00\x20\x00\x63\x00\x6f\
-\x00\x6e\x00\x65\x00\x63\x00\x74\x00\x61\x00\x72\x00\x20\x00\x70\
-\x00\x6f\x00\x72\x00\x20\x00\x70\x00\x72\x00\x69\x00\x6d\x00\x65\
-\x00\x72\x00\x61\x00\x20\x00\x76\x00\x65\x00\x7a\x00\x2e\x00\x3c\
-\x00\x62\x00\x72\x00\x3e\x00\x3c\x00\x62\x00\x72\x00\x3e\x00\x53\
-\x00\x69\x00\x20\x00\x6e\x00\x65\x00\x63\x00\x65\x00\x73\x00\x69\
-\x00\x74\x00\x61\x00\x73\x00\x20\x00\x6d\x00\x6f\x00\x64\x00\x69\
-\x00\x66\x00\x69\x00\x63\x00\x61\x00\x72\x00\x20\x00\x65\x00\x73\
-\x00\x74\x00\x61\x00\x73\x00\x20\x00\x6f\x00\x70\x00\x63\x00\x69\
-\x00\x6f\x00\x6e\x00\x65\x00\x73\x00\x20\x00\x64\x00\x65\x00\x20\
-\x00\x6e\x00\x75\x00\x65\x00\x76\x00\x6f\x00\x2c\x00\x20\x00\x70\
-\x00\x75\x00\x65\x00\x64\x00\x65\x00\x73\x00\x20\x00\x65\x00\x6e\
-\x00\x63\x00\x6f\x00\x6e\x00\x74\x00\x72\x00\x61\x00\x72\x00\x20\
-\x00\x65\x00\x73\x00\x74\x00\x65\x00\x20\x00\x61\x00\x73\x00\x69\
-\x00\x73\x00\x74\x00\x65\x00\x6e\x00\x74\x00\x65\x00\x20\x00\x65\
-\x00\x6e\x00\x20\x00\x65\x00\x6c\x00\x20\x00\x6d\x00\x65\x00\x6e\
-\x00\x75\x00\x20\x00\x64\x00\x65\x00\x20\x00\x27\x00\x3c\x00\x69\
-\x00\x3e\x00\x4f\x00\x70\x00\x63\x00\x69\x00\x6f\x00\x6e\x00\x65\
-\x00\x73\x00\x3c\x00\x2f\x00\x69\x00\x3e\x00\x27\x00\x20\x00\x65\
-\x00\x6e\x00\x20\x00\x6c\x00\x61\x00\x20\x00\x76\x00\x65\x00\x6e\
-\x00\x74\x00\x61\x00\x6e\x00\x61\x00\x20\x00\x70\x00\x72\x00\x69\
-\x00\x6e\x00\x63\x00\x69\x00\x70\x00\x61\x00\x6c\x00\x2e\x00\x3c\
-\x00\x62\x00\x72\x00\x3e\x00\x3c\x00\x62\x00\x72\x00\x3e\x00\x51\
-\x00\x75\x00\x69\x00\x65\x00\x72\x00\x65\x00\x73\x00\x20\x00\x3c\
-\x00\x62\x00\x3e\x00\x72\x00\x65\x00\x67\x00\x69\x00\x73\x00\x74\
-\x00\x72\x00\x61\x00\x72\x00\x3c\x00\x2f\x00\x62\x00\x3e\x00\x20\
-\x00\x75\x00\x6e\x00\x61\x00\x20\x00\x6e\x00\x75\x00\x65\x00\x76\
-\x00\x61\x00\x20\x00\x63\x00\x75\x00\x65\x00\x6e\x00\x74\x00\x61\
-\x00\x2c\x00\x20\x00\x6f\x00\x20\x00\x3c\x00\x62\x00\x3e\x00\x6c\
-\x00\x6f\x00\x67\x00\x75\x00\x65\x00\x61\x00\x72\x00\x74\x00\x65\
-\x00\x3c\x00\x2f\x00\x62\x00\x3e\x00\x20\x00\x63\x00\x6f\x00\x6e\
-\x00\x20\x00\x74\x00\x75\x00\x20\x00\x75\x00\x73\x00\x75\x00\x61\
-\x00\x72\x00\x69\x00\x6f\x00\x3f\x00\x3c\x00\x62\x00\x72\x00\x3e\
-\x00\x20\x08\x00\x00\x00\x00\x06\x00\x00\x01\x5d\x4e\x6f\x77\x20\
-\x77\x65\x20\x77\x69\x6c\x6c\x20\x67\x75\x69\x64\x65\x20\x79\x6f\
-\x75\x20\x74\x68\x72\x6f\x75\x67\x68\x20\x73\x6f\x6d\x65\x20\x63\
-\x6f\x6e\x66\x69\x67\x75\x72\x61\x74\x69\x6f\x6e\x20\x74\x68\x61\
-\x74\x20\x69\x73\x20\x6e\x65\x65\x64\x65\x64\x20\x62\x65\x66\x6f\
-\x72\x65\x20\x79\x6f\x75\x20\x63\x61\x6e\x20\x63\x6f\x6e\x6e\x65\
-\x63\x74\x20\x66\x6f\x72\x20\x74\x68\x65\x20\x66\x69\x72\x73\x74\
-\x20\x74\x69\x6d\x65\x2e\x3c\x62\x72\x3e\x3c\x62\x72\x3e\x49\x66\
-\x20\x79\x6f\x75\x20\x65\x76\x65\x72\x20\x6e\x65\x65\x64\x20\x74\
-\x6f\x20\x6d\x6f\x64\x69\x66\x79\x20\x74\x68\x65\x73\x65\x20\x6f\
-\x70\x74\x69\x6f\x6e\x73\x20\x61\x67\x61\x69\x6e\x2c\x20\x79\x6f\
-\x75\x20\x63\x61\x6e\x20\x66\x69\x6e\x64\x20\x74\x68\x65\x20\x77\
-\x69\x7a\x61\x72\x64\x20\x69\x6e\x20\x74\x68\x65\x20\x27\x3c\x69\
-\x3e\x53\x65\x74\x74\x69\x6e\x67\x73\x3c\x2f\x69\x3e\x27\x20\x6d\
-\x65\x6e\x75\x20\x66\x72\x6f\x6d\x20\x74\x68\x65\x20\x6d\x61\x69\
-\x6e\x20\x77\x69\x6e\x64\x6f\x77\x2e\x3c\x62\x72\x3e\x3c\x62\x72\
-\x3e\x44\x6f\x20\x79\x6f\x75\x20\x77\x61\x6e\x74\x20\x74\x6f\x20\
-\x3c\x62\x3e\x73\x69\x67\x6e\x20\x75\x70\x3c\x2f\x62\x3e\x20\x66\
-\x6f\x72\x20\x61\x20\x6e\x65\x77\x20\x61\x63\x63\x6f\x75\x6e\x74\
-\x2c\x20\x6f\x72\x20\x3c\x62\x3e\x6c\x6f\x67\x20\x69\x6e\x3c\x2f\
-\x62\x3e\x20\x77\x69\x74\x68\x20\x61\x6e\x20\x61\x6c\x72\x65\x61\
-\x64\x79\x20\x65\x78\x69\x73\x74\x69\x6e\x67\x20\x75\x73\x65\x72\
-\x6e\x61\x6d\x65\x3f\x3c\x62\x72\x3e\x07\x00\x00\x00\x09\x49\x6e\
-\x74\x72\x6f\x50\x61\x67\x65\x01\x03\x00\x00\x00\x36\x00\x52\x00\
-\x65\x00\x67\x00\x69\x00\x73\x00\x74\x00\x72\x00\x61\x00\x72\x00\
-\x20\x00\x75\x00\x6e\x00\x61\x00\x20\x00\x63\x00\x75\x00\x65\x00\
-\x6e\x00\x74\x00\x61\x00\x20\x00\x6e\x00\x75\x00\x65\x00\x76\x00\
-\x61\x00\x2e\x08\x00\x00\x00\x00\x06\x00\x00\x00\x1a\x53\x69\x67\
-\x6e\x20\x75\x70\x20\x66\x6f\x72\x20\x61\x20\x6e\x65\x77\x20\x61\
-\x63\x63\x6f\x75\x6e\x74\x2e\x07\x00\x00\x00\x09\x49\x6e\x74\x72\
-\x6f\x50\x61\x67\x65\x01\x03\x00\x00\x00\x34\x00\x45\x00\x72\x00\
-\x72\x00\x6f\x00\x72\x00\x20\x00\x64\x00\x65\x00\x20\x00\x61\x00\
-\x75\x00\x74\x00\x65\x00\x6e\x00\x74\x00\x69\x00\x63\x00\x61\x00\
-\x63\x00\x69\x00\x6f\x00\x6e\x00\x3a\x00\x20\x00\x25\x00\x73\x08\
-\x00\x00\x00\x00\x06\x00\x00\x00\x18\x41\x75\x74\x68\x65\x6e\x74\
-\x69\x63\x61\x74\x69\x6f\x6e\x20\x65\x72\x72\x6f\x72\x3a\x20\x25\
-\x73\x07\x00\x00\x00\x09\x4c\x6f\x67\x49\x6e\x50\x61\x67\x65\x01\
-\x03\x00\x00\x00\x2e\x00\x43\x00\x72\x00\x65\x00\x64\x00\x65\x00\
-\x6e\x00\x63\x00\x69\x00\x61\x00\x6c\x00\x65\x00\x73\x00\x20\x00\
-\x76\x00\x61\x00\x6c\x00\x69\x00\x64\x00\x61\x00\x64\x00\x61\x00\
-\x73\x00\x2e\x08\x00\x00\x00\x00\x06\x00\x00\x00\x16\x43\x72\x65\
-\x64\x65\x6e\x74\x69\x61\x6c\x73\x20\x76\x61\x6c\x69\x64\x61\x74\
-\x65\x64\x2e\x07\x00\x00\x00\x09\x4c\x6f\x67\x49\x6e\x50\x61\x67\
-\x65\x01\x03\x00\x00\x00\x3a\x00\x52\x00\x65\x00\x73\x00\x6f\x00\
-\x6c\x00\x76\x00\x69\x00\x65\x00\x6e\x00\x64\x00\x6f\x00\x20\x00\
-\x6e\x00\x6f\x00\x6d\x00\x62\x00\x72\x00\x65\x00\x20\x00\x64\x00\
-\x65\x00\x20\x00\x64\x00\x6f\x00\x6d\x00\x69\x00\x6e\x00\x69\x00\
-\x6f\x08\x00\x00\x00\x00\x06\x00\x00\x00\x15\x52\x65\x73\x6f\x6c\
-\x76\x69\x6e\x67\x20\x64\x6f\x6d\x61\x69\x6e\x20\x6e\x61\x6d\x65\
-\x07\x00\x00\x00\x09\x4c\x6f\x67\x49\x6e\x50\x61\x67\x65\x01\x03\
-\x00\x00\x00\x5a\x00\x45\x00\x6c\x00\x20\x00\x75\x00\x73\x00\x75\
-\x00\x61\x00\x72\x00\x69\x00\x6f\x00\x20\x00\x74\x00\x69\x00\x65\
-\x00\x6e\x00\x65\x00\x20\x00\x71\x00\x75\x00\x65\x00\x20\x00\x73\
-\x00\x65\x00\x72\x00\x20\x00\x75\x00\x73\x00\x75\x00\x61\x00\x72\
-\x00\x69\x00\x6f\x00\x40\x00\x74\x00\x75\x00\x2e\x00\x70\x00\x72\
-\x00\x6f\x00\x76\x00\x65\x00\x65\x00\x64\x00\x6f\x00\x72\x08\x00\
-\x00\x00\x00\x06\x00\x00\x00\x2f\x55\x73\x65\x72\x6e\x61\x6d\x65\
-\x20\x6d\x75\x73\x74\x20\x62\x65\x20\x69\x6e\x20\x74\x68\x65\x20\
-\x75\x73\x65\x72\x6e\x61\x6d\x65\x40\x70\x72\x6f\x76\x69\x64\x65\
-\x72\x20\x66\x6f\x72\x6d\x2e\x07\x00\x00\x00\x09\x4c\x6f\x67\x49\
-\x6e\x50\x61\x67\x65\x01\x03\x00\x00\x00\x24\x00\x49\x00\x6e\x00\
-\x66\x00\x6f\x00\x20\x00\x64\x00\x65\x00\x6c\x00\x20\x00\x50\x00\
-\x72\x00\x6f\x00\x76\x00\x65\x00\x65\x00\x64\x00\x6f\x00\x72\x08\
-\x00\x00\x00\x00\x06\x00\x00\x00\x0d\x50\x72\x6f\x76\x69\x64\x65\
-\x72\x20\x49\x6e\x66\x6f\x07\x00\x00\x00\x10\x50\x72\x6f\x76\x69\
-\x64\x65\x72\x49\x6e\x66\x6f\x50\x61\x67\x65\x01\x03\x00\x00\x00\
-\x42\x00\x45\x00\x73\x00\x74\x00\x6f\x00\x20\x00\x65\x00\x73\x00\
-\x20\x00\x6c\x00\x6f\x00\x20\x00\x71\x00\x75\x00\x65\x00\x20\x00\
-\x64\x00\x69\x00\x63\x00\x65\x00\x20\x00\x65\x00\x6c\x00\x20\x00\
-\x70\x00\x72\x00\x6f\x00\x76\x00\x65\x00\x65\x00\x64\x00\x6f\x00\
-\x72\x00\x2e\x08\x00\x00\x00\x00\x06\x00\x00\x00\x1b\x54\x68\x69\
-\x73\x20\x69\x73\x20\x77\x68\x61\x74\x20\x70\x72\x6f\x76\x69\x64\
-\x65\x72\x20\x73\x61\x79\x73\x2e\x07\x00\x00\x00\x10\x50\x72\x6f\
-\x76\x69\x64\x65\x72\x49\x6e\x66\x6f\x50\x61\x67\x65\x01\x03\x00\
-\x00\x00\x46\x00\x43\x00\x6f\x00\x6d\x00\x70\x00\x72\x00\x6f\x00\
-\x62\x00\x61\x00\x6e\x00\x64\x00\x6f\x00\x20\x00\x65\x00\x6c\x00\
-\x20\x00\x66\x00\x69\x00\x6e\x00\x67\x00\x65\x00\x72\x00\x70\x00\
-\x72\x00\x69\x00\x6e\x00\x74\x00\x20\x00\x64\x00\x65\x00\x20\x00\
-\x6c\x00\x61\x00\x20\x00\x43\x00\x41\x08\x00\x00\x00\x00\x06\x00\
-\x00\x00\x17\x43\x68\x65\x63\x6b\x69\x6e\x67\x20\x43\x41\x20\x66\
-\x69\x6e\x67\x65\x72\x70\x72\x69\x6e\x74\x07\x00\x00\x00\x1b\x50\
-\x72\x6f\x76\x69\x64\x65\x72\x53\x65\x74\x75\x70\x56\x61\x6c\x69\
-\x64\x61\x74\x69\x6f\x6e\x50\x61\x67\x65\x01\x03\x00\x00\x00\x22\
-\x00\x41\x00\x75\x00\x74\x00\x6f\x00\x63\x00\x6f\x00\x6e\x00\x66\
-\x00\x69\x00\x67\x00\x75\x00\x72\x00\x61\x00\x6e\x00\x64\x00\x6f\
-\x00\x2e\x08\x00\x00\x00\x00\x06\x00\x00\x00\x11\x44\x6f\x69\x6e\
-\x67\x20\x61\x75\x74\x6f\x63\x6f\x6e\x66\x69\x67\x2e\x07\x00\x00\
-\x00\x1b\x50\x72\x6f\x76\x69\x64\x65\x72\x53\x65\x74\x75\x70\x56\
-\x61\x6c\x69\x64\x61\x74\x69\x6f\x6e\x50\x61\x67\x65\x01\x03\x00\
-\x00\x00\x3e\x00\x4f\x00\x62\x00\x74\x00\x65\x00\x6e\x00\x69\x00\
-\x65\x00\x6e\x00\x64\x00\x6f\x00\x20\x00\x63\x00\x65\x00\x72\x00\
-\x74\x00\x69\x00\x66\x00\x69\x00\x63\x00\x61\x00\x64\x00\x6f\x00\
-\x20\x00\x64\x00\x65\x00\x20\x00\x6c\x00\x61\x00\x20\x00\x43\x00\
-\x41\x08\x00\x00\x00\x00\x06\x00\x00\x00\x17\x46\x65\x74\x63\x68\
-\x69\x6e\x67\x20\x43\x41\x20\x63\x65\x72\x74\x69\x66\x69\x63\x61\
-\x74\x65\x07\x00\x00\x00\x1b\x50\x72\x6f\x76\x69\x64\x65\x72\x53\
-\x65\x74\x75\x70\x56\x61\x6c\x69\x64\x61\x74\x69\x6f\x6e\x50\x61\
-\x67\x65\x01\x03\x00\x00\x00\x36\x00\x43\x00\x6f\x00\x6e\x00\x66\
-\x00\x69\x00\x67\x00\x75\x00\x72\x00\x61\x00\x63\x00\x69\x00\x6f\
-\x00\x6e\x00\x20\x00\x64\x00\x65\x00\x6c\x00\x20\x00\x50\x00\x72\
-\x00\x6f\x00\x76\x00\x65\x00\x65\x00\x64\x00\x6f\x00\x72\x08\x00\
-\x00\x00\x00\x06\x00\x00\x00\x0e\x50\x72\x6f\x76\x69\x64\x65\x72\
-\x20\x73\x65\x74\x75\x70\x07\x00\x00\x00\x1b\x50\x72\x6f\x76\x69\
-\x64\x65\x72\x53\x65\x74\x75\x70\x56\x61\x6c\x69\x64\x61\x74\x69\
-\x6f\x6e\x50\x61\x67\x65\x01\x03\x00\x00\x00\x3e\x00\x56\x00\x61\
-\x00\x6c\x00\x69\x00\x64\x00\x61\x00\x6e\x00\x64\x00\x6f\x00\x20\
-\x00\x63\x00\x65\x00\x72\x00\x74\x00\x69\x00\x66\x00\x69\x00\x63\
-\x00\x61\x00\x64\x00\x6f\x00\x20\x00\x64\x00\x65\x00\x20\x00\x6c\
-\x00\x61\x00\x20\x00\x61\x00\x70\x00\x69\x08\x00\x00\x00\x00\x06\
-\x00\x00\x00\x1a\x56\x61\x6c\x69\x64\x61\x74\x69\x6e\x67\x20\x61\
-\x70\x69\x20\x63\x65\x72\x74\x69\x66\x69\x63\x61\x74\x65\x07\x00\
-\x00\x00\x1b\x50\x72\x6f\x76\x69\x64\x65\x72\x53\x65\x74\x75\x70\
-\x56\x61\x6c\x69\x64\x61\x74\x69\x6f\x6e\x50\x61\x67\x65\x01\x03\
-\x00\x00\x00\x46\x00\x4e\x00\x6f\x00\x20\x00\x73\x00\x65\x00\x20\
-\x00\x70\x00\x75\x00\x64\x00\x6f\x00\x20\x00\x72\x00\x65\x00\x67\
-\x00\x69\x00\x73\x00\x74\x00\x72\x00\x61\x00\x72\x00\x20\x00\x28\
-\x00\x62\x00\x61\x00\x64\x00\x20\x00\x72\x00\x65\x00\x73\x00\x70\
-\x00\x6f\x00\x6e\x00\x73\x00\x65\x00\x29\x08\x00\x00\x00\x00\x06\
-\x00\x00\x00\x21\x43\x6f\x75\x6c\x64\x20\x6e\x6f\x74\x20\x72\x65\
-\x67\x69\x73\x74\x65\x72\x20\x28\x62\x61\x64\x20\x72\x65\x73\x70\
-\x6f\x6e\x73\x65\x29\x07\x00\x00\x00\x10\x52\x65\x67\x69\x73\x74\
-\x65\x72\x55\x73\x65\x72\x50\x61\x67\x65\x01\x03\x00\x00\x00\x54\
-\x00\x45\x00\x72\x00\x72\x00\x6f\x00\x72\x00\x20\x00\x63\x00\x6f\
-\x00\x6e\x00\x65\x00\x63\x00\x74\x00\x61\x00\x6e\x00\x64\x00\x6f\
-\x00\x73\x00\x65\x00\x20\x00\x61\x00\x6c\x00\x20\x00\x70\x00\x72\
-\x00\x6f\x00\x76\x00\x65\x00\x65\x00\x64\x00\x6f\x00\x72\x00\x20\
-\x00\x28\x00\x63\x00\x6f\x00\x6e\x00\x6e\x00\x65\x00\x72\x00\x72\
-\x00\x29\x00\x2e\x08\x00\x00\x00\x00\x06\x00\x00\x00\x27\x45\x72\
-\x72\x6f\x72\x20\x43\x6f\x6e\x6e\x65\x63\x74\x69\x6e\x67\x20\x74\
-\x6f\x20\x70\x72\x6f\x76\x69\x64\x65\x72\x20\x28\x63\x6f\x6e\x6e\
-\x65\x72\x72\x29\x2e\x07\x00\x00\x00\x10\x52\x65\x67\x69\x73\x74\
-\x65\x72\x55\x73\x65\x72\x50\x61\x67\x65\x01\x03\x00\x00\x00\x52\
-\x00\x45\x00\x72\x00\x72\x00\x6f\x00\x72\x00\x20\x00\x63\x00\x6f\
-\x00\x6e\x00\x65\x00\x63\x00\x74\x00\x61\x00\x6e\x00\x64\x00\x6f\
-\x00\x73\x00\x65\x00\x20\x00\x61\x00\x6c\x00\x20\x00\x70\x00\x72\
-\x00\x6f\x00\x76\x00\x65\x00\x65\x00\x64\x00\x6f\x00\x72\x00\x20\
-\x00\x28\x00\x74\x00\x69\x00\x6d\x00\x65\x00\x6f\x00\x75\x00\x74\
-\x00\x29\x08\x00\x00\x00\x00\x06\x00\x00\x00\x26\x45\x72\x72\x6f\
-\x72\x20\x63\x6f\x6e\x6e\x65\x63\x74\x69\x6e\x67\x20\x74\x6f\x20\
-\x70\x72\x6f\x76\x69\x64\x65\x72\x20\x28\x74\x69\x6d\x65\x6f\x75\
-\x74\x29\x07\x00\x00\x00\x10\x52\x65\x67\x69\x73\x74\x65\x72\x55\
-\x73\x65\x72\x50\x61\x67\x65\x01\x03\x00\x00\x00\x3c\x00\x45\x00\
-\x72\x00\x72\x00\x6f\x00\x72\x00\x20\x00\x64\x00\x75\x00\x72\x00\
-\x61\x00\x6e\x00\x74\x00\x65\x00\x20\x00\x65\x00\x6c\x00\x20\x00\
-\x72\x00\x65\x00\x67\x00\x69\x00\x73\x00\x74\x00\x72\x00\x6f\x00\
-\x20\x00\x28\x00\x25\x00\x73\x00\x29\x08\x00\x00\x00\x00\x06\x00\
-\x00\x00\x1e\x45\x72\x72\x6f\x72\x20\x64\x75\x72\x69\x6e\x67\x20\
-\x72\x65\x67\x69\x73\x74\x72\x61\x74\x69\x6f\x6e\x20\x28\x25\x73\
-\x29\x07\x00\x00\x00\x10\x52\x65\x67\x69\x73\x74\x65\x72\x55\x73\
-\x65\x72\x50\x61\x67\x65\x01\x03\x00\x00\x00\x40\x00\x4c\x00\x61\
-\x00\x73\x00\x20\x00\x63\x00\x6f\x00\x6e\x00\x74\x00\x72\x00\x61\
-\x00\x73\x00\x65\x00\x6e\x00\x61\x00\x73\x00\x20\x00\x6e\x00\x6f\
-\x00\x20\x00\x73\x00\x6f\x00\x6e\x00\x20\x00\x69\x00\x67\x00\x75\
-\x00\x61\x00\x6c\x00\x65\x00\x73\x00\x2e\x00\x2e\x08\x00\x00\x00\
-\x00\x06\x00\x00\x00\x19\x50\x61\x73\x73\x77\x6f\x72\x64\x20\x64\
-\x6f\x65\x73\x20\x6e\x6f\x74\x20\x6d\x61\x74\x63\x68\x2e\x2e\x07\
-\x00\x00\x00\x10\x52\x65\x67\x69\x73\x74\x65\x72\x55\x73\x65\x72\
-\x50\x61\x67\x65\x01\x03\x00\x00\x00\x36\x00\x43\x00\x6f\x00\x6e\
-\x00\x74\x00\x72\x00\x61\x00\x73\x00\x65\x00\x6e\x00\x61\x00\x20\
-\x00\x64\x00\x65\x00\x6d\x00\x61\x00\x73\x00\x69\x00\x61\x00\x64\
-\x00\x6f\x00\x20\x00\x6f\x00\x62\x00\x76\x00\x69\x00\x61\x00\x2e\
-\x08\x00\x00\x00\x00\x06\x00\x00\x00\x15\x50\x61\x73\x73\x77\x6f\
-\x72\x64\x20\x74\x6f\x6f\x20\x6f\x62\x76\x69\x6f\x75\x73\x2e\x07\
-\x00\x00\x00\x10\x52\x65\x67\x69\x73\x74\x65\x72\x55\x73\x65\x72\
-\x50\x61\x67\x65\x01\x03\x00\x00\x00\x36\x00\x43\x00\x6f\x00\x6e\
-\x00\x74\x00\x72\x00\x61\x00\x73\x00\x65\x00\x6e\x00\x61\x00\x20\
-\x00\x64\x00\x65\x00\x6d\x00\x61\x00\x73\x00\x69\x00\x61\x00\x64\
-\x00\x6f\x00\x20\x00\x63\x00\x6f\x00\x72\x00\x74\x00\x61\x00\x2e\
-\x08\x00\x00\x00\x00\x06\x00\x00\x00\x13\x50\x61\x73\x73\x77\x6f\
-\x72\x64\x20\x74\x6f\x6f\x20\x73\x68\x6f\x72\x74\x2e\x07\x00\x00\
-\x00\x10\x52\x65\x67\x69\x73\x74\x65\x72\x55\x73\x65\x72\x50\x61\
-\x67\x65\x01\x03\x00\x00\x00\x5e\x00\x52\x00\x65\x00\x67\x00\x69\
-\x00\x73\x00\x74\x00\x72\x00\x61\x00\x72\x00\x20\x00\x75\x00\x6e\
-\x00\x20\x00\x6e\x00\x75\x00\x65\x00\x76\x00\x6f\x00\x20\x00\x75\
-\x00\x73\x00\x75\x00\x61\x00\x72\x00\x69\x00\x6f\x00\x20\x00\x63\
-\x00\x6f\x00\x6e\x00\x20\x00\x65\x00\x6c\x00\x20\x00\x70\x00\x72\
-\x00\x6f\x00\x76\x00\x65\x00\x65\x00\x64\x00\x6f\x00\x72\x00\x20\
-\x00\x25\x00\x73\x00\x2e\x08\x00\x00\x00\x00\x06\x00\x00\x00\x25\
-\x52\x65\x67\x69\x73\x74\x65\x72\x20\x61\x20\x6e\x65\x77\x20\x75\
-\x73\x65\x72\x20\x77\x69\x74\x68\x20\x70\x72\x6f\x76\x69\x64\x65\
-\x72\x20\x25\x73\x2e\x07\x00\x00\x00\x10\x52\x65\x67\x69\x73\x74\
-\x65\x72\x55\x73\x65\x72\x50\x61\x67\x65\x01\x03\x00\x00\x00\x30\
-\x00\x43\x00\x75\x00\x65\x00\x6e\x00\x74\x00\x61\x00\x20\x00\x63\
-\x00\x72\x00\x65\x00\x61\x00\x64\x00\x61\x00\x20\x00\x63\x00\x6f\
-\x00\x6e\x00\x20\x00\x65\x00\x78\x00\x69\x00\x74\x00\x6f\x00\x21\
-\x08\x00\x00\x00\x00\x06\x00\x00\x00\x17\x52\x65\x67\x69\x73\x74\
-\x72\x61\x74\x69\x6f\x6e\x20\x73\x75\x63\x63\x65\x65\x64\x65\x64\
-\x21\x07\x00\x00\x00\x10\x52\x65\x67\x69\x73\x74\x65\x72\x55\x73\
-\x65\x72\x50\x61\x67\x65\x01\x03\x00\x00\x00\x18\x00\x4e\x00\x75\
-\x00\x65\x00\x76\x00\x61\x00\x20\x00\x43\x00\x75\x00\x65\x00\x6e\
-\x00\x74\x00\x61\x08\x00\x00\x00\x00\x06\x00\x00\x00\x07\x53\x69\
-\x67\x6e\x20\x55\x70\x07\x00\x00\x00\x10\x52\x65\x67\x69\x73\x74\
-\x65\x72\x55\x73\x65\x72\x50\x61\x67\x65\x01\x03\x00\x00\x00\x2c\
-\x00\x55\x00\x73\x00\x75\x00\x61\x00\x72\x00\x69\x00\x6f\x00\x20\
-\x00\x6e\x00\x6f\x00\x20\x00\x64\x00\x69\x00\x73\x00\x70\x00\x6f\
-\x00\x6e\x00\x69\x00\x62\x00\x6c\x00\x65\x00\x2e\x08\x00\x00\x00\
-\x00\x06\x00\x00\x00\x17\x55\x73\x65\x72\x6e\x61\x6d\x65\x20\x6e\
-\x6f\x74\x20\x61\x76\x61\x69\x6c\x61\x62\x6c\x65\x2e\x07\x00\x00\
-\x00\x10\x52\x65\x67\x69\x73\x74\x65\x72\x55\x73\x65\x72\x50\x61\
-\x67\x65\x01\x03\x00\x00\x00\x34\x00\x45\x00\x72\x00\x72\x00\x6f\
-\x00\x72\x00\x20\x00\x64\x00\x65\x00\x20\x00\x61\x00\x75\x00\x74\
-\x00\x65\x00\x6e\x00\x74\x00\x69\x00\x63\x00\x61\x00\x63\x00\x69\
-\x00\x6f\x00\x6e\x00\x3a\x00\x20\x00\x25\x00\x73\x08\x00\x00\x00\
-\x00\x06\x00\x00\x00\x18\x41\x75\x74\x68\x65\x6e\x74\x69\x63\x61\
-\x74\x69\x6f\x6e\x20\x65\x72\x72\x6f\x72\x3a\x20\x25\x73\x07\x00\
-\x00\x00\x1a\x52\x65\x67\x69\x73\x74\x65\x72\x55\x73\x65\x72\x56\
-\x61\x6c\x69\x64\x61\x74\x69\x6f\x6e\x50\x61\x67\x65\x01\x03\x00\
-\x00\x00\x34\x00\x4f\x00\x62\x00\x74\x00\x65\x00\x6e\x00\x69\x00\
-\x65\x00\x6e\x00\x64\x00\x6f\x00\x20\x00\x63\x00\x65\x00\x72\x00\
-\x74\x00\x69\x00\x66\x00\x69\x00\x63\x00\x61\x00\x64\x00\x6f\x00\
-\x20\x00\x65\x00\x69\x00\x70\x08\x00\x00\x00\x00\x06\x00\x00\x00\
-\x18\x46\x65\x74\x63\x68\x69\x6e\x67\x20\x65\x69\x70\x20\x63\x65\
-\x72\x74\x69\x66\x69\x63\x61\x74\x65\x07\x00\x00\x00\x1a\x52\x65\
-\x67\x69\x73\x74\x65\x72\x55\x73\x65\x72\x56\x61\x6c\x69\x64\x61\
-\x74\x69\x6f\x6e\x50\x61\x67\x65\x01\x03\x00\x00\x00\x52\x00\x4f\
-\x00\x62\x00\x74\x00\x65\x00\x6e\x00\x69\x00\x65\x00\x6e\x00\x64\
-\x00\x6f\x00\x20\x00\x63\x00\x6f\x00\x6e\x00\x66\x00\x69\x00\x67\
-\x00\x75\x00\x72\x00\x61\x00\x63\x00\x69\x00\x6f\x00\x6e\x00\x20\
-\x00\x64\x00\x65\x00\x6c\x00\x20\x00\x70\x00\x72\x00\x6f\x00\x76\
-\x00\x65\x00\x65\x00\x64\x00\x6f\x00\x72\x00\x2e\x00\x2e\x00\x2e\
-\x08\x00\x00\x00\x00\x06\x00\x00\x00\x1b\x46\x65\x74\x63\x68\x69\
-\x6e\x67\x20\x70\x72\x6f\x76\x69\x64\x65\x72\x20\x63\x6f\x6e\x66\
-\x69\x67\x2e\x2e\x2e\x07\x00\x00\x00\x1a\x52\x65\x67\x69\x73\x74\
-\x65\x72\x55\x73\x65\x72\x56\x61\x6c\x69\x64\x61\x74\x69\x6f\x6e\
-\x50\x61\x67\x65\x01\x03\x00\x00\x00\x34\x00\x56\x00\x61\x00\x6c\
-\x00\x69\x00\x64\x00\x61\x00\x63\x00\x69\x00\x6f\x00\x6e\x00\x20\
-\x00\x64\x00\x65\x00\x6c\x00\x20\x00\x63\x00\x65\x00\x72\x00\x74\
-\x00\x69\x00\x66\x00\x69\x00\x63\x00\x61\x00\x64\x00\x6f\x08\x00\
-\x00\x00\x00\x06\x00\x00\x00\x16\x43\x65\x72\x74\x69\x66\x69\x63\
-\x61\x74\x65\x20\x76\x61\x6c\x69\x64\x61\x74\x69\x6f\x6e\x07\x00\
-\x00\x00\x12\x53\x65\x6c\x65\x63\x74\x50\x72\x6f\x76\x69\x64\x65\
-\x72\x50\x61\x67\x65\x01\x03\x00\x00\x00\x6c\x00\x6e\x00\x6f\x00\
-\x20\x00\x73\x00\x65\x00\x20\x00\x70\x00\x75\x00\x64\x00\x6f\x00\
-\x20\x00\x6f\x00\x62\x00\x74\x00\x65\x00\x6e\x00\x65\x00\x72\x00\
-\x20\x00\x69\x00\x6e\x00\x66\x00\x6f\x00\x20\x00\x64\x00\x65\x00\
-\x6c\x00\x20\x00\x70\x00\x72\x00\x6f\x00\x76\x00\x65\x00\x65\x00\
-\x64\x00\x6f\x00\x72\x00\x20\x00\x28\x00\x72\x00\x65\x00\x66\x00\
-\x75\x00\x73\x00\x65\x00\x64\x00\x20\x00\x63\x00\x6f\x00\x6e\x00\
-\x6e\x00\x2e\x00\x29\x00\x2e\x08\x00\x00\x00\x00\x06\x00\x00\x00\
-\x31\x43\x6f\x75\x6c\x64\x20\x6e\x6f\x74\x20\x64\x6f\x77\x6e\x6c\
-\x6f\x61\x64\x20\x70\x72\x6f\x76\x69\x64\x65\x72\x20\x69\x6e\x66\
-\x6f\x20\x28\x72\x65\x66\x75\x73\x65\x64\x20\x63\x6f\x6e\x6e\x2e\
-\x29\x2e\x07\x00\x00\x00\x12\x53\x65\x6c\x65\x63\x74\x50\x72\x6f\
-\x76\x69\x64\x65\x72\x50\x61\x67\x65\x01\x03\x00\x00\x00\x4a\x00\
-\x6e\x00\x6f\x00\x20\x00\x73\x00\x65\x00\x20\x00\x70\x00\x75\x00\
-\x64\x00\x6f\x00\x20\x00\x6f\x00\x62\x00\x74\x00\x65\x00\x6e\x00\
-\x65\x00\x72\x00\x20\x00\x69\x00\x6e\x00\x66\x00\x6f\x00\x20\x00\
-\x64\x00\x65\x00\x6c\x00\x20\x00\x70\x00\x72\x00\x6f\x00\x76\x00\
-\x65\x00\x65\x00\x64\x00\x6f\x00\x72\x08\x00\x00\x00\x00\x06\x00\
-\x00\x00\x21\x43\x6f\x75\x6c\x64\x20\x6e\x6f\x74\x20\x67\x65\x74\
-\x20\x69\x6e\x66\x6f\x20\x66\x72\x6f\x6d\x20\x70\x72\x6f\x76\x69\
-\x64\x65\x72\x2e\x07\x00\x00\x00\x12\x53\x65\x6c\x65\x63\x74\x50\
-\x72\x6f\x76\x69\x64\x65\x72\x50\x61\x67\x65\x01\x03\x00\x00\x00\
-\x24\x00\x45\x00\x6e\x00\x74\x00\x72\x00\x61\x00\x20\x00\x74\x00\
-\x75\x00\x20\x00\x50\x00\x72\x00\x6f\x00\x76\x00\x65\x00\x65\x00\
-\x64\x00\x6f\x00\x72\x08\x00\x00\x00\x00\x06\x00\x00\x00\x0e\x45\
-\x6e\x74\x65\x72\x20\x50\x72\x6f\x76\x69\x64\x65\x72\x07\x00\x00\
-\x00\x12\x53\x65\x6c\x65\x63\x74\x50\x72\x6f\x76\x69\x64\x65\x72\
-\x50\x61\x67\x65\x01\x03\x00\x00\x00\x9c\x00\x50\x00\x6f\x00\x72\
-\x00\x20\x00\x66\x00\x61\x00\x76\x00\x6f\x00\x72\x00\x2c\x00\x20\
-\x00\x72\x00\x65\x00\x6c\x00\x6c\x00\x65\x00\x6e\x00\x61\x00\x20\
-\x00\x65\x00\x6c\x00\x20\x00\x64\x00\x6f\x00\x6d\x00\x69\x00\x6e\
-\x00\x69\x00\x6f\x00\x20\x00\x64\x00\x65\x00\x6c\x00\x20\x00\x70\
-\x00\x72\x00\x6f\x00\x76\x00\x65\x00\x65\x00\x64\x00\x6f\x00\x72\
-\x00\x20\x00\x71\x00\x75\x00\x65\x00\x20\x00\x71\x00\x75\x00\x69\
-\x00\x65\x00\x72\x00\x61\x00\x73\x00\x20\x00\x75\x00\x73\x00\x61\
-\x00\x72\x00\x20\x00\x70\x00\x61\x00\x72\x00\x61\x00\x20\x00\x74\
-\x00\x75\x00\x20\x00\x63\x00\x6f\x00\x6e\x00\x65\x00\x78\x00\x69\
-\x00\x6f\x00\x6e\x00\x2e\x08\x00\x00\x00\x00\x06\x00\x00\x00\x4c\
-\x50\x6c\x65\x61\x73\x65\x20\x65\x6e\x74\x65\x72\x20\x74\x68\x65\
-\x20\x64\x6f\x6d\x61\x69\x6e\x20\x6f\x66\x20\x74\x68\x65\x20\x70\
-\x72\x6f\x76\x69\x64\x65\x72\x20\x79\x6f\x75\x20\x77\x61\x6e\x74\
-\x20\x74\x6f\x20\x75\x73\x65\x20\x66\x6f\x72\x20\x79\x6f\x75\x72\
-\x20\x63\x6f\x6e\x6e\x65\x63\x74\x69\x6f\x6e\x2e\x07\x00\x00\x00\
-\x12\x53\x65\x6c\x65\x63\x74\x50\x72\x6f\x76\x69\x64\x65\x72\x50\
-\x61\x67\x65\x01\x03\x00\x00\x00\x62\x00\x4e\x00\x6f\x00\x20\x00\
-\x73\x00\x65\x00\x20\x00\x70\x00\x75\x00\x64\x00\x6f\x00\x20\x00\
-\x76\x00\x65\x00\x72\x00\x69\x00\x66\x00\x69\x00\x63\x00\x61\x00\
-\x72\x00\x20\x00\x65\x00\x6c\x00\x20\x00\x63\x00\x65\x00\x72\x00\
-\x74\x00\x69\x00\x66\x00\x69\x00\x63\x00\x61\x00\x64\x00\x6f\x00\
-\x20\x00\x64\x00\x65\x00\x6c\x00\x20\x00\x73\x00\x65\x00\x72\x00\
-\x76\x00\x69\x00\x64\x00\x6f\x00\x72\x00\x2e\x08\x00\x00\x00\x00\
-\x06\x00\x00\x00\x29\x53\x65\x72\x76\x65\x72\x20\x63\x65\x72\x74\
-\x69\x66\x69\x63\x61\x74\x65\x20\x63\x6f\x75\x6c\x64\x20\x6e\x6f\
-\x74\x20\x62\x65\x20\x76\x65\x72\x69\x66\x69\x65\x64\x2e\x07\x00\
-\x00\x00\x12\x53\x65\x6c\x65\x63\x74\x50\x72\x6f\x76\x69\x64\x65\
-\x72\x50\x61\x67\x65\x01\x03\x00\x00\x00\x16\x00\x63\x00\x6f\x00\
-\x6d\x00\x70\x00\x72\x00\x6f\x00\x26\x00\x62\x00\x61\x00\x72\x00\
-\x21\x08\x00\x00\x00\x00\x06\x00\x00\x00\x07\x63\x68\x65\x63\x26\
-\x6b\x21\x07\x00\x00\x00\x12\x53\x65\x6c\x65\x63\x74\x50\x72\x6f\
-\x76\x69\x64\x65\x72\x50\x61\x67\x65\x01\x03\x00\x00\x00\x3a\x00\
-\x63\x00\x6f\x00\x6d\x00\x70\x00\x72\x00\x6f\x00\x62\x00\x61\x00\
-\x6e\x00\x64\x00\x6f\x00\x20\x00\x6e\x00\x6f\x00\x6d\x00\x62\x00\
-\x72\x00\x65\x00\x20\x00\x64\x00\x65\x00\x20\x00\x64\x00\x6f\x00\
-\x6d\x00\x69\x00\x6e\x00\x69\x00\x6f\x08\x00\x00\x00\x00\x06\x00\
-\x00\x00\x14\x63\x68\x65\x63\x6b\x69\x6e\x67\x20\x64\x6f\x6d\x61\
-\x69\x6e\x20\x6e\x61\x6d\x65\x07\x00\x00\x00\x12\x53\x65\x6c\x65\
-\x63\x74\x50\x72\x6f\x76\x69\x64\x65\x72\x50\x61\x67\x65\x01\x03\
-\x00\x00\x00\x34\x00\x63\x00\x6f\x00\x6d\x00\x70\x00\x72\x00\x6f\
-\x00\x62\x00\x61\x00\x6e\x00\x64\x00\x6f\x00\x20\x00\x63\x00\x6f\
-\x00\x6e\x00\x65\x00\x78\x00\x69\x00\x6f\x00\x6e\x00\x20\x00\x68\
-\x00\x74\x00\x74\x00\x70\x00\x73\x08\x00\x00\x00\x00\x06\x00\x00\
-\x00\x19\x63\x68\x65\x63\x6b\x69\x6e\x67\x20\x68\x74\x74\x70\x73\
-\x20\x63\x6f\x6e\x6e\x65\x63\x74\x69\x6f\x6e\x07\x00\x00\x00\x12\
-\x53\x65\x6c\x65\x63\x74\x50\x72\x6f\x76\x69\x64\x65\x72\x50\x61\
-\x67\x65\x01\x03\x00\x00\x00\x3a\x00\x6f\x00\x62\x00\x74\x00\x65\
-\x00\x6e\x00\x69\x00\x65\x00\x6e\x00\x64\x00\x6f\x00\x20\x00\x69\
-\x00\x6e\x00\x66\x00\x6f\x00\x20\x00\x64\x00\x65\x00\x6c\x00\x20\
-\x00\x70\x00\x72\x00\x65\x00\x76\x00\x65\x00\x65\x00\x64\x00\x6f\
-\x00\x72\x08\x00\x00\x00\x00\x06\x00\x00\x00\x16\x66\x65\x74\x63\
-\x68\x69\x6e\x67\x20\x70\x72\x6f\x76\x69\x64\x65\x72\x20\x69\x6e\
-\x66\x6f\x07\x00\x00\x00\x12\x53\x65\x6c\x65\x63\x74\x50\x72\x6f\
-\x76\x69\x64\x65\x72\x50\x61\x67\x65\x01\x88\x00\x00\x00\x02\x01\
-\x01\
-"
-
-qt_resource_name = "\
-\x00\x0c\
-\x0d\xfc\x11\x13\
-\x00\x74\
-\x00\x72\x00\x61\x00\x6e\x00\x73\x00\x6c\x00\x61\x00\x74\x00\x69\x00\x6f\x00\x6e\x00\x73\
-\x00\x05\
-\x00\x6a\x85\x7d\
-\x00\x64\
-\x00\x65\x00\x2e\x00\x71\x00\x6d\
-\x00\x05\
-\x00\x6c\x65\x7d\
-\x00\x65\
-\x00\x73\x00\x2e\x00\x71\x00\x6d\
-"
-
-qt_resource_struct = "\
-\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
-\x00\x00\x00\x00\x00\x02\x00\x00\x00\x02\x00\x00\x00\x02\
-\x00\x00\x00\x1e\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
-\x00\x00\x00\x2e\x00\x00\x00\x00\x00\x01\x00\x00\x17\x98\
-"
-
-def qInitResources():
- QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
-
-def qCleanupResources():
- QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
-
-qInitResources()
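
The deleted locale_rc.py above is a pyrcc-generated module: importing it runs qInitResources(), which registers the embedded translations/de.qm and translations/es.qm catalogs under Qt's ":/" virtual filesystem. A minimal usage sketch follows; such a module is typically regenerated with the PyQt4 resource compiler, e.g. pyrcc4 -o locale_rc.py locale.qrc (the .qrc filename here is illustrative).

# Hedged sketch: loading one of the translation catalogs embedded above.
# Importing locale_rc has the side effect of calling qInitResources(),
# after which the embedded files are reachable under the ":/" prefix.
import sys

from PyQt4 import QtCore, QtGui

import locale_rc  # registers the resources on import

app = QtGui.QApplication(sys.argv)

translator = QtCore.QTranslator()
if translator.load(":/translations/de.qm"):  # German catalog from the blob
    app.installTranslator(translator)
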
diff --git a/src/leap/gui/mainwindow_rc.py b/src/leap/gui/mainwindow_rc.py
deleted file mode 100644
index 9d16a35e..00000000
--- a/src/leap/gui/mainwindow_rc.py
+++ /dev/null
@@ -1,1130 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# Resource object code
-#
-# Created: Wed Jan 30 06:06:54 2013
-# by: The Resource Compiler for PyQt (Qt v4.8.2)
-#
-# WARNING! All changes made in this file will be lost!
-
-from PyQt4 import QtCore
-
-qt_resource_data = "\
-\x00\x00\x05\x95\
-\x89\
-\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
-\x00\x00\x40\x00\x00\x00\x40\x08\x03\x00\x00\x00\x9d\xb7\x81\xec\
-\x00\x00\x00\x03\x73\x42\x49\x54\x08\x08\x08\xdb\xe1\x4f\xe0\x00\
-\x00\x00\x09\x70\x48\x59\x73\x00\x00\x37\x5d\x00\x00\x37\x5d\x01\
-\x19\x80\x46\x5d\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\
-\x77\x61\x72\x65\x00\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\x70\
-\x65\x2e\x6f\x72\x67\x9b\xee\x3c\x1a\x00\x00\x00\x1f\x74\x45\x58\
-\x74\x54\x69\x74\x6c\x65\x00\x47\x6e\x6f\x6d\x65\x20\x53\x79\x6d\
-\x62\x6f\x6c\x69\x63\x20\x49\x63\x6f\x6e\x20\x54\x68\x65\x6d\x65\
-\x8e\xa4\x29\xab\x00\x00\x02\x13\x50\x4c\x54\x45\xff\xff\xff\xff\
-\x00\x00\xff\x00\x00\xaa\x00\x00\xbf\x00\x00\xbf\xbf\xbf\xd5\x00\
-\x00\xc6\x00\x00\xc4\x00\x00\xbb\xbb\xbb\xcc\x00\x00\xcf\x00\x00\
-\xcc\x00\x00\xce\x00\x00\xb9\xb9\xb9\xc2\xc2\xc2\xce\x00\x00\xca\
-\x00\x00\xcc\x00\x00\xcd\x00\x00\xcc\x00\x00\xc1\xc1\xc1\xce\x00\
-\x00\xca\x00\x00\xcb\x00\x00\xcd\x00\x00\xcb\x00\x00\xcd\x00\x00\
-\xce\x00\x00\xbe\xbe\xbe\xcc\x00\x00\xbf\xbf\xbf\xbe\xbe\xbe\xcd\
-\x00\x00\xcb\x00\x00\xcc\x00\x00\xcd\x00\x00\xcc\x00\x00\xcc\x00\
-\x00\xbe\xbe\xbe\xbf\xbf\xbf\xcb\x00\x00\xcb\x00\x00\xcb\x00\x00\
-\xcc\x00\x00\xcc\x00\x00\xbf\xbf\xbf\xcd\x00\x00\xcb\x00\x00\xcc\
-\x00\x00\xcc\x00\x00\xbf\xbf\xbf\xcc\x00\x00\xcc\x00\x00\xcc\x00\
-\x00\xbe\xbe\xbe\xcc\x00\x00\xbe\xbe\xbe\xcc\x00\x00\xcc\x00\x00\
-\xcc\x00\x00\xcc\x00\x00\xbe\xbe\xbe\xbe\xbe\xbe\xcc\x00\x00\xcc\
-\x00\x00\xcc\x00\x00\xcc\x00\x00\xbe\xb7\xb7\xbe\xb8\xb8\xbe\xba\
-\xba\xbe\xbc\xbc\xbe\xbd\xbd\xbe\xbe\xbe\xbf\xaa\xaa\xbf\xab\xab\
-\xbf\xac\xac\xbf\xad\xad\xbf\xae\xae\xbf\xb0\xb0\xbf\xb1\xb1\xbf\
-\xb4\xb4\xbf\xb6\xb6\xbf\xb7\xb7\xc0\x9c\x9c\xc0\x9d\x9d\xc0\xa1\
-\xa1\xc0\xa2\xa2\xc0\xa4\xa4\xc0\xa5\xa5\xc0\xa6\xa6\xc0\xa7\xa7\
-\xc0\xa8\xa8\xc1\x8d\x8d\xc1\x91\x91\xc1\x94\x94\xc1\x95\x95\xc1\
-\x96\x96\xc1\x99\x99\xc1\x9c\x9c\xc2\x82\x82\xc2\x87\x87\xc2\x88\
-\x88\xc2\x8d\x8d\xc2\x8e\x8e\xc3\x73\x73\xc3\x74\x74\xc3\x76\x76\
-\xc3\x79\x79\xc3\x7c\x7c\xc3\x7d\x7d\xc3\x7f\x7f\xc4\x67\x67\xc4\
-\x6c\x6c\xc4\x6d\x6d\xc4\x6e\x6e\xc4\x70\x70\xc5\x59\x59\xc5\x5d\
-\x5d\xc5\x5f\x5f\xc5\x62\x62\xc5\x63\x63\xc6\x4c\x4c\xc6\x4f\x4f\
-\xc6\x50\x50\xc6\x53\x53\xc6\x56\x56\xc6\x58\x58\xc7\x3e\x3e\xc7\
-\x41\x41\xc7\x43\x43\xc7\x45\x45\xc7\x46\x46\xc7\x47\x47\xc7\x4b\
-\x4b\xc8\x31\x31\xc8\x35\x35\xc8\x36\x36\xc8\x38\x38\xc8\x3a\x3a\
-\xc8\x3c\x3c\xc9\x22\x22\xc9\x25\x25\xc9\x26\x26\xc9\x27\x27\xc9\
-\x28\x28\xc9\x2a\x2a\xc9\x2d\x2d\xc9\x2e\x2e\xca\x16\x16\xca\x17\
-\x17\xca\x1a\x1a\xca\x1b\x1b\xca\x1c\x1c\xca\x1d\x1d\xca\x1e\x1e\
-\xca\x20\x20\xca\x21\x21\xcb\x07\x07\xcb\x09\x09\xcb\x0a\x0a\xcb\
-\x0c\x0c\xcb\x0d\x0d\xcb\x0e\x0e\xcb\x0f\x0f\xcb\x10\x10\xcb\x11\
-\x11\xcb\x12\x12\xcb\x13\x13\xcc\x00\x00\xcc\x01\x01\xcc\x02\x02\
-\xcc\x03\x03\xcc\x04\x04\xcc\x05\x05\xcc\x06\x06\xcc\x07\x07\xd4\
-\x0d\x79\xbb\x00\x00\x00\x44\x74\x52\x4e\x53\x00\x01\x02\x03\x04\
-\x04\x06\x09\x0d\x0f\x0f\x10\x14\x15\x16\x19\x1a\x1d\x1e\x24\x28\
-\x29\x2a\x30\x36\x3d\x40\x42\x43\x4b\x55\x58\x5e\x60\x63\x64\x65\
-\x6e\x73\x7d\x7f\x8a\x94\x99\x9a\xaa\xb2\xbb\xbc\xc3\xc9\xca\xd2\
-\xd5\xde\xe0\xe3\xe6\xe8\xed\xef\xf6\xf7\xfa\xfa\xfb\xfc\xfd\xef\
-\xfa\x14\xec\x00\x00\x02\x79\x49\x44\x41\x54\x58\xc3\xed\x96\xd7\
-\x5b\x13\x41\x14\x47\x47\x05\x15\x5b\x2c\xa0\x58\x62\xb0\x26\xb6\
-\xa0\x46\xc5\x28\x12\x6c\x49\xae\x0d\x62\x01\xc5\x82\x58\x10\x7b\
-\x2f\xd8\x1b\x8a\xbd\x00\x62\x41\x45\x14\xf5\x04\x51\xff\x44\x1f\
-\x76\x37\x1f\x09\xc9\x66\x37\x8f\xc8\xef\xed\xee\xf7\x9d\xb3\x33\
-\x73\x67\x66\x57\xa9\xfe\x98\x65\x94\xd3\xe3\xf5\xfb\xbd\x1e\xe7\
-\xa8\x8c\xf0\x6c\x77\x40\xf4\x04\xdc\xd9\xf6\xf9\x3c\x9f\xf4\x88\
-\x2f\xcf\x2e\x5f\x50\x22\x71\x29\x29\xb0\xf9\xfe\x04\x5e\xa4\xc4\
-\xd6\x18\xb2\x7d\xd2\x2b\x3e\x3b\xeb\xe0\x96\x24\x71\xdb\xe8\x5f\
-\x20\x99\x20\x60\xbd\x9b\x4e\x83\x29\x72\x39\x1c\xae\x22\xa3\x72\
-\x5a\x16\x78\x0c\x3e\x47\x29\xa5\x72\x0c\x83\xc7\xb2\xc0\xab\x13\
-\x2e\xad\x74\xe9\xa5\xd7\xb2\xc0\xaf\x13\x0e\xad\x74\xe8\xa5\xdf\
-\x12\x3c\x31\x57\x29\x63\xd2\xc6\xb3\x58\x3d\x60\xda\xa0\x74\x7c\
-\x6e\x71\xf1\x94\x94\x82\x21\x0b\x98\x9b\x86\x1f\xb9\x0c\x56\xcf\
-\x4c\x21\xd8\xb4\x18\x98\x65\x2e\x28\x04\xe0\x66\x38\x99\xe0\x60\
-\x0b\xc0\x9a\x7c\x53\xc1\x84\xe5\x00\x3c\xd9\xda\x7b\x1f\x1d\x6e\
-\x07\x60\xce\x40\xf3\x21\x8c\x7e\x01\xc0\xf3\x0d\x89\xfc\xd9\x2e\
-\x80\xee\xa3\x69\xbb\xb0\xed\x29\x00\x3f\x0f\xc4\xe1\xa1\x6b\x00\
-\x7c\x39\x22\x69\x05\x12\xbe\x05\xc0\xa7\x43\x3d\xf8\x48\x23\x00\
-\xad\x15\x62\x41\x20\x72\xac\x1b\x20\x7a\x26\xc6\x57\xb5\x01\xd0\
-\x54\x26\xd6\x04\x52\xd7\x01\xc0\x83\xa0\xc6\x57\x77\x02\x70\x2f\
-\x28\x56\x05\x52\xf9\x16\x80\x87\xe5\x22\x22\x27\x7f\x03\x44\xaf\
-\xc4\xb7\xd6\x5c\x20\x65\xcd\x00\xbc\xdf\x29\x72\x19\x80\x3f\xa7\
-\xc4\x96\x40\x82\x0d\x00\x7c\xdd\x7b\x17\x80\xce\x6a\xb1\x29\x10\
-\xa9\x8f\x02\x44\x01\x68\xab\x12\xfb\x02\x39\xfd\x17\x3d\x8d\x11\
-\xc9\x44\x20\x35\x5a\x33\xb8\x1a\x92\xcc\x04\x17\x7e\x69\x82\x57\
-\x9b\x33\x12\x84\x6e\x18\x33\xa0\x65\x7b\x06\x82\xc8\x63\x00\x3e\
-\x03\xd0\x5e\x6b\x5b\xb0\xe7\x19\x00\x2f\x37\x9e\xef\x02\xe8\x3a\
-\x67\x53\x50\xf3\x03\x80\xdb\xeb\x44\x6a\xb5\x7b\xe0\x7a\xc8\x8e\
-\x40\xef\xe0\xc5\xb0\x88\xc8\x8e\x37\x00\x3c\x8a\x58\x17\x68\x7b\
-\xa8\xfb\xb8\x3e\xea\x2d\xaf\x01\xf8\xb8\xdb\xa2\x20\x78\x1f\x80\
-\x8e\xba\xd8\xc2\xad\xbf\x03\xc0\xf7\xfd\x96\x04\x65\x4d\x00\xbc\
-\xab\xec\xd1\xfc\xf0\xa5\xd8\x89\x4a\x2b\xa8\x68\x05\xa0\xb9\x3c\
-\xfe\x4a\x3c\x61\x9c\xe9\x74\xfc\x64\x6d\xfb\x36\x04\x13\x2f\xd5\
-\x7d\xdf\xb4\x5b\x65\xb0\x39\x3f\xa3\x14\x20\x5a\x9f\xe4\xf7\x60\
-\xd7\x07\x00\x16\x8d\x30\x15\xcc\x03\x58\x35\x35\xf1\x9b\xa8\x94\
-\x52\x6a\xf8\x42\x80\x95\xe3\x4c\x05\x59\x85\xb0\x22\x5f\x25\x15\
-\xa8\xac\xf9\x50\x3a\x29\xcd\x1a\x0c\x5b\xba\x64\xac\x4a\x21\x50\
-\x6a\xf6\xda\xe9\x69\xbb\x30\x66\xa8\x4a\x2d\x50\xe3\xed\xfd\x2d\
-\x5a\x3e\x40\x7d\x44\x20\x36\xd3\x2f\xe8\x9b\x82\xff\x38\xff\x00\
-\xc1\x36\x30\x95\xf0\x66\xca\x60\x00\x00\x00\x00\x49\x45\x4e\x44\
-\xae\x42\x60\x82\
-\x00\x00\x04\xec\
-\x89\
-\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
-\x00\x00\x18\x00\x00\x00\x18\x08\x06\x00\x00\x00\xe0\x77\x3d\xf8\
-\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\
-\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x06\xec\x00\x00\x06\xec\
-\x01\x1e\x75\x38\x35\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\
-\x74\x77\x61\x72\x65\x00\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\
-\x70\x65\x2e\x6f\x72\x67\x9b\xee\x3c\x1a\x00\x00\x00\x13\x74\x45\
-\x58\x74\x41\x75\x74\x68\x6f\x72\x00\x52\x6f\x64\x6e\x65\x79\x20\
-\x44\x61\x77\x65\x73\x0e\xd8\x7e\x1d\x00\x00\x04\x4a\x49\x44\x41\
-\x54\x48\x89\x8d\x96\x5d\x6c\x53\x65\x18\xc7\x7f\xef\x39\x6b\xbb\
-\x7e\x9c\x75\x65\xad\x2b\x9b\xfb\xd0\x31\xdd\x14\xb6\x8c\x19\x44\
-\x90\x44\x63\x82\x42\x88\x5e\x90\x98\xcc\x19\x15\x13\xd4\x18\x76\
-\x61\xd4\x18\xe3\x85\x57\xca\x05\xe1\xc2\x0c\xa3\xa8\x51\xd0\x4c\
-\x12\xe3\x85\x31\x80\x26\x6a\xe2\x85\x23\xb0\x38\xb6\xc1\x1c\xce\
-\xb1\x40\x59\xf6\xe5\xca\xda\xae\xed\xfa\x75\x7a\x5e\x2f\x4e\xd7\
-\x59\xd6\x32\xfe\xc9\x7b\xf3\x9e\xe7\xf9\xff\x9f\xe7\xff\x9e\xf3\
-\x9c\x57\x48\x29\x59\x0f\xbd\x7b\x85\x0d\x17\xed\x1e\xbb\xb2\x07\
-\x20\x94\x30\x7e\x22\xc6\x48\xcf\x59\x99\x5a\x2f\x57\x94\x12\xf8\
-\xec\x55\x61\x71\x65\x6d\x47\xfc\xbe\xda\x47\x9d\x5a\xa5\xbf\xda\
-\x69\xaf\xda\xe0\x28\x2f\x07\x58\x5c\x4e\x26\xe7\xe3\x89\x9b\xf1\
-\x68\x78\x6e\x6e\x61\xfa\x8f\x98\x9a\x7a\xfb\x95\xe3\x32\x73\xc7\
-\x02\x9f\x76\x89\x8e\xba\xda\xda\x2f\xb7\x37\xdf\xdf\xe6\x2a\x13\
-\x8a\x94\x06\x82\xc2\x38\x89\x40\x08\x85\x98\x2e\x8d\xf3\x13\xe3\
-\x97\xa6\xa6\xa7\x5f\x7e\xed\x94\x1c\x5a\x57\xa0\xef\xa0\xfd\x70\
-\x5b\xf3\x96\x03\xcd\xde\x8a\x6a\x61\x64\xd7\x73\xc0\x14\x53\x54\
-\x26\x82\x4b\xf3\x97\x26\x2e\x7f\xd5\xfd\x79\xe2\xdd\x92\x02\x27\
-\x5f\x2a\x7b\xe1\x89\xce\x1d\xc7\xbc\x76\x55\x13\xc5\x98\xac\x4e\
-\x10\x0a\xa4\xa2\x6b\x45\x80\x60\x22\x1b\xfd\x6d\xf0\xdc\xa1\x17\
-\x4f\xe8\x5f\xaf\x11\x38\xfa\x9c\xf0\x6e\xdb\xf4\xc0\xf9\x6d\xf5\
-\xfe\x26\x30\xf2\x89\xca\xc6\x76\xd4\x07\xf7\xa3\xd4\x74\x80\xd5\
-\x65\x6e\xa6\xe3\x64\x03\xfd\x64\x2f\x9e\x40\x46\x67\xff\x27\xa3\
-\x30\x70\x63\x6e\x72\xe0\xea\xd8\xf6\x37\xbf\x95\x41\x73\x27\x87\
-\x06\x8f\xa7\x6f\x6b\x7d\x4d\x01\x39\x80\x52\xff\x08\x4a\xe3\xae\
-\x55\xf2\x5c\x27\x6a\xf3\x6e\x2c\x7b\x8f\x9a\x5d\xe5\x61\xb0\xb5\
-\xbe\xa6\xa9\xc1\xe3\xe9\x5b\x95\x04\x7a\xbb\x44\x47\x5b\x53\xcb\
-\x4e\x15\xbd\x98\x31\xc8\x70\x00\xfd\xfc\xc7\x64\xce\xbc\x81\x7e\
-\xe1\x13\xc8\x75\x2d\xb4\x8d\x28\xb5\x0f\x15\xc4\xaa\xe8\xb4\x35\
-\xb5\xec\xec\xed\x12\x1d\x00\x65\x00\xee\x72\x65\x9f\x5f\x73\x38\
-\x05\x6b\x0f\x35\x3b\xf6\x03\xfa\xc0\xf1\x3c\x29\xb3\xc3\xa8\xf7\
-\x3e\x8e\xf0\xb5\x98\x22\xf6\x0d\x05\xf1\x02\xf0\x6b\x0e\xa7\xbb\
-\x5c\xd9\x07\x0c\x29\x00\x9a\xc3\xd5\x69\x55\xd5\xe2\xd5\x47\xe7\
-\x56\xc9\x01\xe1\xbe\x1b\xe1\xb9\x67\xf5\x79\x70\x7c\x4d\x8e\x55\
-\x55\xd1\x1c\xae\xce\xbc\x45\x15\x6e\x5f\x9d\x90\xc5\xed\x29\xa8\
-\xae\xa2\x06\xcb\x53\x47\xa0\xcc\x66\x76\x37\xfa\x3d\xd9\xa9\x81\
-\xb5\x71\x52\xa7\xc2\xed\xab\x83\x9c\x45\x76\xbb\x56\x25\xa5\xa4\
-\xe8\xab\xb9\x02\x9b\x86\x65\xf7\x87\x08\xcd\x6f\x92\x8f\x9f\x21\
-\xf5\xdd\xf3\xa0\xa7\x10\xe5\x6e\x44\x45\x2d\x38\x7d\x08\x21\x90\
-\xd2\xe4\xcc\x0b\x24\x12\xd1\x9b\x42\xbd\xab\x81\x6c\xba\x28\xb7\
-\x94\x06\x65\xcd\x4f\x22\x2a\x1b\x00\x30\xa6\xff\x24\xd5\xb7\x1f\
-\x74\x73\x14\xc9\x64\x04\x99\x8c\x80\xc5\x8e\xe2\xae\x03\xab\x93\
-\x44\x22\x7a\x33\x6f\xd1\x52\x64\x61\x0a\xb5\xbc\x28\xb1\xb1\x34\
-\x83\x91\xb3\xc1\x98\x1d\xc1\x98\x1d\x41\x3f\xd7\x9b\x27\x2f\x40\
-\x26\x81\x11\xfc\x07\x99\x8a\x99\x9c\x2b\x1d\x44\x97\x63\x83\xc9\
-\xe8\xfc\x33\x36\x23\x05\xaa\x05\xd2\xcb\xc8\x74\xcc\xfc\x88\x72\
-\x5d\xa5\x7f\x3c\x74\x3b\x03\x0b\x90\x52\xed\x44\x97\x63\x83\x79\
-\x81\x48\xd2\x38\x3d\x1b\xcf\xbc\x53\x1f\xb9\xe4\x44\x1a\x45\x93\
-\xac\xcf\x7e\x83\xda\xb8\xcb\x2c\xf4\xd7\xf7\xd1\x2f\x9e\x2c\xce\
-\x2e\x14\xe6\xd2\x65\xf1\x48\xd2\x38\x0d\x39\x8b\x7a\x4e\xc9\xa1\
-\xd1\xc0\xb5\xfe\xac\xb7\xb5\x64\x55\xc2\xe5\x47\x54\x36\x98\xe7\
-\x60\xd3\x4a\xc6\x65\xbd\xad\x8c\x06\xae\xf5\xf7\xe4\x26\x6b\x7e\
-\x54\x04\x42\xa1\xee\xe1\x90\x31\x29\x1c\xde\xd2\xbd\xaf\x03\xe1\
-\xf0\x32\x1c\x32\x26\x03\xa1\x50\x77\x7e\xef\xd6\x69\xfa\x58\x7b\
-\xe7\x31\x5f\x78\x54\x23\xb3\x5c\x90\xac\xf8\xdb\x10\x0e\xf3\xab\
-\x35\x82\x13\xc8\xa5\xe9\x42\x76\x8b\x83\x85\xca\xcd\xd1\xdf\x47\
-\x06\x8b\x4f\xd3\x15\xf4\x1d\xb4\x1f\xde\xd2\xd4\x7a\x60\x93\x1a\
-\xaa\x26\x74\xfd\xce\x4a\xf7\x34\x72\x35\xeb\x99\xbf\x3c\x79\xe5\
-\xf6\xff\x83\x15\x7c\xf0\xb4\xd8\xbe\xb9\xa9\xe6\x8b\x1d\x0d\xd5\
-\xad\xae\xd8\x94\x22\x13\x21\x90\xb7\xcc\x29\xa1\x22\xec\x1e\x62\
-\xae\x3a\xa3\xff\xfa\xfc\xdf\xe7\xc6\x66\x5e\x3f\xf2\x0b\xfd\x52\
-\x16\x8e\x84\x02\x01\x21\x84\x0a\x54\x01\x95\x9a\x1d\xdf\x7b\x7b\
-\xac\x6f\xdd\x57\xb7\xb1\x6d\x83\xbb\xd2\x53\xe3\x10\x2e\x9f\xcd\
-\xb0\x00\xfc\x9b\x54\xf4\x99\x84\x8c\x2d\x86\xc3\xe1\x2b\x81\xd9\
-\xbf\x0e\xff\x9c\xfe\x28\x9e\x22\x08\x84\x80\xb0\x94\x32\x5c\xb2\
-\x03\x21\x84\x13\xf0\x00\xee\xdc\xd2\x5c\x56\x3c\x5b\xeb\x69\x79\
-\xb8\x51\x74\x18\x12\xe5\xc2\x75\x39\x3c\x74\x83\xc9\x78\x86\x10\
-\x10\x03\x96\x80\x48\x6e\x2d\x4a\xb9\x7a\x01\x28\x79\xab\xc8\x89\
-\x59\x00\x2b\x60\xcb\x2d\x0b\xa0\x02\x3a\x90\x02\xd2\x40\x12\xc8\
-\x48\x79\xab\x87\x26\xfe\x03\x26\x93\xd5\x41\x51\x76\x98\xdb\x00\
-\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
-\x00\x00\x01\xaa\
-\x89\
-\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
-\x00\x00\x40\x00\x00\x00\x40\x08\x03\x00\x00\x00\x9d\xb7\x81\xec\
-\x00\x00\x00\x03\x73\x42\x49\x54\x08\x08\x08\xdb\xe1\x4f\xe0\x00\
-\x00\x00\x09\x70\x48\x59\x73\x00\x00\x37\x5d\x00\x00\x37\x5d\x01\
-\x19\x80\x46\x5d\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\
-\x77\x61\x72\x65\x00\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\x70\
-\x65\x2e\x6f\x72\x67\x9b\xee\x3c\x1a\x00\x00\x00\x1f\x74\x45\x58\
-\x74\x54\x69\x74\x6c\x65\x00\x47\x6e\x6f\x6d\x65\x20\x53\x79\x6d\
-\x62\x6f\x6c\x69\x63\x20\x49\x63\x6f\x6e\x20\x54\x68\x65\x6d\x65\
-\x8e\xa4\x29\xab\x00\x00\x00\x36\x50\x4c\x54\x45\xff\xff\xff\xbf\
-\xbf\xbf\xbb\xbb\xbb\xb9\xb9\xb9\xc2\xc2\xc2\xc1\xc1\xc1\xbe\xbe\
-\xbe\xbf\xbf\xbf\xbe\xbe\xbe\xbe\xbe\xbe\xbf\xbf\xbf\xbf\xbf\xbf\
-\xbf\xbf\xbf\xbe\xbe\xbe\xbe\xbe\xbe\xbe\xbe\xbe\xbe\xbe\xbe\xbe\
-\xbe\xbe\xf1\xb6\xe9\xa5\x00\x00\x00\x11\x74\x52\x4e\x53\x00\x04\
-\x0f\x16\x19\x29\x4b\x58\x5e\x7d\x7f\xb2\xca\xe0\xe6\xf7\xfa\x2a\
-\xb3\x5d\x53\x00\x00\x00\x9e\x49\x44\x41\x54\x58\xc3\xed\x95\xb9\
-\x12\x83\x30\x0c\x05\x31\x18\x1b\x1f\x18\xeb\xff\x7f\x36\xc9\x20\
-\xcd\x24\xe1\x92\xe8\x00\x6d\xf7\x8a\x5d\xc0\x2e\x68\x1a\x65\x8f\
-\xce\xc5\x54\x4a\x8a\xae\x3b\xa5\x9b\x50\x01\xa9\xc1\xc8\xfd\x3e\
-\xc3\x17\xb9\x97\xfa\xc3\x04\x3f\x4c\x83\xf0\xf9\x7f\xfe\xbb\x20\
-\x7a\x07\x93\x61\x41\x96\x9c\x43\x80\x15\x82\xe0\xfe\xea\x5a\xa0\
-\xf2\x6f\xd3\x91\x33\x7a\x6b\xfd\x48\xcb\xb1\x03\x91\xfc\xf6\xb3\
-\x5a\x2a\x44\x76\x20\xa1\xe1\xe7\xe9\x71\x26\x76\xa0\xa0\x61\xe7\
-\x69\x71\x16\x76\x80\x3e\x7a\x6b\xdf\x3d\x00\x07\x68\x40\x03\x1a\
-\xd0\x80\x06\x9e\x15\xd8\xfb\xc1\x88\xd1\xc0\xe5\x02\x20\x44\x03\
-\xf7\x0c\x3c\x98\x17\xb4\xcd\x62\x13\x3b\x4c\x60\xe6\x00\x00\x00\
-\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
-\x00\x00\x02\xc8\
-\x89\
-\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
-\x00\x00\x40\x00\x00\x00\x40\x08\x03\x00\x00\x00\x9d\xb7\x81\xec\
-\x00\x00\x00\x03\x73\x42\x49\x54\x08\x08\x08\xdb\xe1\x4f\xe0\x00\
-\x00\x00\x09\x70\x48\x59\x73\x00\x00\x37\x5d\x00\x00\x37\x5d\x01\
-\x19\x80\x46\x5d\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\
-\x77\x61\x72\x65\x00\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\x70\
-\x65\x2e\x6f\x72\x67\x9b\xee\x3c\x1a\x00\x00\x00\x1f\x74\x45\x58\
-\x74\x54\x69\x74\x6c\x65\x00\x47\x6e\x6f\x6d\x65\x20\x53\x79\x6d\
-\x62\x6f\x6c\x69\x63\x20\x49\x63\x6f\x6e\x20\x54\x68\x65\x6d\x65\
-\x8e\xa4\x29\xab\x00\x00\x00\xb4\x50\x4c\x54\x45\xff\xff\xff\xff\
-\xff\xff\x80\x80\x80\xbf\xbf\xbf\xcc\xcc\xcc\xbf\xbf\xbf\xc6\xc6\
-\xc6\xb3\xb3\xb3\xc8\xc8\xc8\xc3\xc3\xc3\xba\xba\xba\xc4\xc4\xc4\
-\xbd\xbd\xbd\xb9\xb9\xb9\xb9\xb9\xb9\xbf\xbf\xbf\xbc\xbc\xbc\xbd\
-\xbd\xbd\xbe\xbe\xbe\xbd\xbd\xbd\xbc\xbc\xbc\xbe\xbe\xbe\xbe\xbe\
-\xbe\xbe\xbe\xbe\xbe\xbe\xbe\xbf\xbf\xbf\xbd\xbd\xbd\xbe\xbe\xbe\
-\xbf\xbf\xbf\xbd\xbd\xbd\xbf\xbf\xbf\xbe\xbe\xbe\xbe\xbe\xbe\xbe\
-\xbe\xbe\xbe\xbe\xbe\xbd\xbd\xbd\xbe\xbe\xbe\xbe\xbe\xbe\xbe\xbe\
-\xbe\xbe\xbe\xbe\xbd\xbd\xbd\xbf\xbf\xbf\xbe\xbe\xbe\xbe\xbe\xbe\
-\xbe\xbe\xbe\xbf\xbf\xbf\xbe\xbe\xbe\xbe\xbe\xbe\xbe\xbe\xbe\xbe\
-\xbe\xbe\xbe\xbe\xbe\xbd\xbd\xbd\xbe\xbe\xbe\xbe\xbe\xbe\xbe\xbe\
-\xbe\xbe\xbe\xbe\xbe\xbe\xbe\xbe\xbe\xbe\xbe\xbe\xbe\xbe\xbe\xbe\
-\xe4\x72\x0e\xe3\x00\x00\x00\x3b\x74\x52\x4e\x53\x00\x01\x02\x04\
-\x05\x08\x09\x0a\x0e\x11\x1a\x1a\x1f\x21\x2c\x2c\x35\x36\x3b\x3e\
-\x41\x43\x47\x4b\x4e\x50\x55\x56\x57\x59\x63\x66\x71\x86\x89\x90\
-\x95\x96\x9d\x9e\xa7\xaa\xad\xb5\xb8\xbe\xc0\xc3\xc5\xc9\xcd\xd6\
-\xe0\xe6\xef\xf5\xf7\xfc\xfd\xec\xba\xa4\x27\x00\x00\x01\x14\x49\
-\x44\x41\x54\x58\xc3\xed\x95\x5b\x57\x82\x40\x14\x85\x9d\x44\xb4\
-\x40\x50\x29\x92\x64\xb2\xd4\xee\xf7\xbc\x54\xf2\xff\xff\x97\xb2\
-\x98\x91\xe2\x20\xed\xe3\x4b\x2b\xe4\x7b\xfb\xd6\x39\x7b\x73\x99\
-\x87\xa9\xd5\x2a\x8a\x68\x58\xae\x17\x04\x9e\x6b\x35\x76\x8a\x0b\
-\x27\x94\x8a\xd0\x11\xfc\x7c\xcb\x97\xdf\xf0\x5b\xdc\x7c\x7b\x20\
-\x7f\x30\x68\x33\x9f\x9f\xc9\xaf\x1b\x58\xef\x20\x7c\x49\xf0\x39\
-\xff\xc1\x91\x39\x38\x8c\xf3\x0b\xf3\x0a\x42\xfc\x34\x2d\x9d\xe9\
-\xdb\xa6\x69\xf7\xb5\x59\x70\x81\xab\xf3\x46\x6c\x86\x6e\x70\xe1\
-\x02\x4f\x25\xec\x44\x6d\xa5\x1e\x5c\x10\xa8\x84\x99\xa8\xa9\x34\
-\x80\x0b\xf4\x47\x6f\xf3\xb2\x17\xc8\x5f\xa8\x0a\xfe\x45\xc1\xc9\
-\x9a\x6d\xe1\x78\x56\xdd\x76\xc5\x74\xc7\x6f\xef\x93\x63\xdc\x33\
-\x18\x97\x5f\x51\xcc\x55\x13\x73\xc2\x28\x52\xdc\x60\x9e\xa5\xb7\
-\xd4\x0b\xd1\x29\xe2\x84\xeb\xcd\x3c\xba\x47\x9c\x30\x4d\x17\xe6\
-\x88\x13\x66\xe9\xc2\x42\x00\x4e\xb8\x4d\x17\x1e\x11\x27\x9c\xa5\
-\x0b\x43\xc4\x29\x77\x7a\xfe\x24\x20\x27\x1c\x3e\x24\xf3\x97\x23\
-\xcc\x73\x38\x7f\xfe\xf8\x7c\xbd\x38\x80\x3d\x87\xba\xc1\xf3\x3d\
-\xa0\xa3\xd8\xb9\x80\x7d\xad\xff\x6d\x81\x64\x52\x15\x94\xb3\x60\
-\x8f\x59\x01\x25\xba\xb5\x2a\xd7\xa3\x29\x75\x00\x00\x00\x00\x49\
-\x45\x4e\x44\xae\x42\x60\x82\
-\x00\x00\x27\x74\
-\x89\
-\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
-\x00\x00\x80\x00\x00\x00\x66\x08\x06\x00\x00\x00\x03\x23\x99\x54\
-\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\
-\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x02\x3a\x00\x00\x02\x3a\
-\x01\xfe\x36\x29\x51\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\
-\x74\x77\x61\x72\x65\x00\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\
-\x70\x65\x2e\x6f\x72\x67\x9b\xee\x3c\x1a\x00\x00\x26\xf1\x49\x44\
-\x41\x54\x78\xda\xed\x9d\x05\x58\x54\x59\xff\xc7\x7f\x77\x86\x14\
-\x90\x50\x14\x41\x94\x10\x51\x09\x45\xb0\x50\xb0\x10\x10\x13\x6b\
-\x2d\x6c\xb0\xdd\xb5\xdb\x35\xd7\x5a\x5b\x09\xdb\xb5\xdb\xb5\x0b\
-\x19\x42\xba\x15\x24\x0c\x14\x29\x15\x29\x25\x06\x66\x7e\xff\x73\
-\x86\xb9\x38\xb0\x60\xbc\xaf\xfe\x5f\x70\x67\x9e\xe7\xf3\xec\xaa\
-\xc0\x1d\xe6\xfb\xb9\xe7\x9e\x3e\x80\x88\x20\xe5\xdf\x8b\xf4\x43\
-\xf8\x9a\x0f\x09\x40\x51\x2a\xc0\xbf\x27\x6c\x5d\x42\x77\xc2\x28\
-\x42\x34\x21\x9f\xc0\x27\xcc\x21\x34\x94\xf8\x3a\x19\xa9\x00\x3f\
-\x4f\xe8\xfa\x84\x49\x84\xab\x84\x62\x02\xd6\x40\x1e\xe1\x00\x21\
-\x96\xb0\x59\x2a\x40\xdd\x0f\xbe\x1d\xe1\x2d\x1b\x70\x23\x79\x79\
-\xec\xa7\xa5\x85\xab\x5b\xb7\x46\x2f\x73\x6b\xf4\x6c\xb0\x12\x97\
-\xa8\x4e\xc6\xd1\x4a\x4e\x68\x2d\xdf\x0e\x75\xb8\x8d\x90\x03\x1c\
-\x56\x86\x9b\x52\x01\xea\x5e\xe0\xc3\x08\xbb\x08\xd3\x09\xf3\x08\
-\x6f\x54\x64\x64\x70\x93\xa9\x29\xbe\x74\x74\x44\x1c\x32\xa4\x82\
-\xe2\xc1\x83\x05\x99\xba\xde\xc2\x77\xba\xfe\x28\x49\x82\xf6\x55\
-\x6c\x29\xdb\x9c\xfe\xb0\xd4\x9f\xe1\x31\xf0\x6f\x13\x60\x11\x7b\
-\xb7\x73\x18\x06\x27\xe9\xe9\x61\x66\xbf\x7e\x95\x82\x97\x24\xdb\
-\xe4\x68\x5c\x55\x01\x28\x8f\xb5\x2f\xa3\x9e\x8c\x0e\xfd\x39\x01\
-\x04\x6d\xa9\x00\x75\x23\x7c\x2f\x1a\xbc\xa5\x66\x7d\xf4\xb0\xb0\
-\xc0\x94\x2a\x77\x7c\x75\x14\x77\x5f\xe9\x53\x9d\x00\x94\xa8\x26\
-\xe7\x45\x8f\x04\xf2\x33\x93\x08\x06\x52\x01\x6a\xbf\x00\x9a\x84\
-\xe7\x2b\x26\x70\x30\xca\xa5\xfd\x17\xc3\xa7\x94\x0d\x18\x17\x56\
-\x93\x00\x94\xb0\x26\xa7\xb1\x31\xb7\x01\xfd\xe1\x45\x84\x38\x82\
-\x8e\x54\x80\xda\x2b\x00\x43\xc8\x98\x3b\x06\x30\xf5\x06\x60\xc9\
-\x1c\x9b\xf8\x2f\x4a\xe0\x3c\x24\xff\x5d\x33\xbf\xb2\xcf\x49\xf0\
-\x50\xeb\x38\x36\xe0\xa8\xb2\x15\xc3\x35\x3f\xf8\xfd\xdb\x12\x8e\
-\x88\x1f\x3d\x43\xe9\xdf\x49\x05\xf8\xb6\x0f\xd1\x63\xe2\x40\xc0\
-\x1c\x1e\xe0\xfb\x07\x90\x57\x3a\xab\xfb\x17\x25\xc8\x6b\x75\xe6\
-\xc9\xe7\x04\xa0\xb8\x6b\xac\x60\x05\x08\xfb\x01\xef\xd9\x98\x70\
-\x81\xca\x4b\xaf\xa1\xa4\xda\x00\xad\xec\x7f\x41\x0e\x87\x4b\xff\
-\xf1\x11\x15\x41\x2a\xc0\xd7\x7f\x98\x9e\x43\x7a\x95\x0b\x20\xc2\
-\x07\xde\x97\xce\xe8\x91\xf0\x39\x01\x8a\xba\xad\xe1\x7d\x49\x80\
-\xe9\x03\xf7\xa3\x4d\xa7\xc9\xac\x04\x7f\x7e\xc7\xf7\x3b\x88\xf6\
-\x3d\x34\x92\xd7\xc2\xfa\xb6\xe3\xb1\xdb\xbe\x68\xec\xeb\x1e\x8e\
-\x93\x4e\x25\x09\xe7\x1c\x8b\xc2\x96\x96\x3d\xd8\x6b\x3e\x24\x58\
-\x48\x05\xf8\xfc\x87\x29\x4f\xc8\xe9\x69\x25\x21\x00\xe5\x01\x64\
-\x97\x4e\xef\x95\x54\x63\x3d\xa0\xdf\xc4\xe0\xcf\x85\xef\x6d\x1b\
-\xf0\x68\xd3\xf2\xe7\xb8\xfc\xd7\x60\x36\x8c\xb4\xef\x54\xdc\xaf\
-\xe6\x32\x5c\xe1\x9e\xd6\x1e\xd8\x65\xf8\x45\x64\xd6\x3d\xc1\x4e\
-\x07\x9e\xa6\x39\xdd\x42\x64\x71\xbc\xf2\x11\x3b\xad\xb9\x86\x5c\
-\x05\x25\x51\x8b\x44\x2a\xc0\xe7\x3f\x54\x2b\x1a\x90\x85\x71\x15\
-\x01\x44\x25\x01\xf3\xb6\x6c\x6a\xaf\xa7\xd5\x09\x20\x1c\x32\x34\
-\xbb\xa6\xf0\xdf\x36\x0b\x28\xfb\x73\xc9\xf3\x64\x2a\xc0\xba\xc5\
-\xf1\x58\xaf\xe5\x50\x7a\xa1\xa7\xff\xed\xb8\x03\xe1\x9c\x22\x47\
-\x11\xcf\xb7\xbb\x82\xa3\x06\xdd\x10\x02\x09\x9f\xc2\xac\x4f\x10\
-\xd8\x9c\xc9\x8e\x1b\x72\xb2\x18\x6d\x3c\x62\xb1\xdd\xa2\x13\xa8\
-\xd0\xb0\x29\xfd\xa6\x77\x84\x66\x75\x52\x00\x71\x30\x46\xdf\xa3\
-\x42\xf3\x85\xeb\xb4\xa1\x02\xe8\x6b\x57\x23\x00\x21\x97\xc7\x64\
-\x09\xa6\xf4\x7e\x5e\x9d\x04\x39\x2d\x2f\x3d\xab\x4e\x80\xdb\xfd\
-\xa3\xfd\x69\xf8\x94\x51\x2e\x97\xb1\xe9\xcc\xb7\xf4\x42\x31\xff\
-\xc5\x7b\xec\x4a\x08\x57\x97\x55\x47\x9f\x8e\x01\xb8\xbc\x9f\x3f\
-\x1f\xd6\xc6\x0b\x68\xf8\xf2\x6b\x13\x0b\x7a\xce\x7b\x19\x36\x7b\
-\xd2\x8b\x0f\x33\xc6\xc4\xb0\xa5\x0d\x5f\xdc\x2d\xdd\xb5\x4e\xd6\
-\x01\xc8\x6b\x3e\x41\x20\xd1\xcf\xbe\x91\x50\xef\x07\x5d\xab\x19\
-\xbd\x8e\x7a\xfd\xea\x05\x10\x49\xf0\x80\xc9\x14\x4c\xb6\x4b\xa9\
-\x2a\x40\x61\x97\x3f\xfc\xaa\x86\x9f\xa5\x17\x50\xb4\x79\xe9\xf3\
-\x74\x1a\xfe\x8a\x25\x2f\x22\xf5\xe6\x95\x60\xb3\x05\x02\x64\x38\
-\x72\xb1\xdf\xf8\xbe\x5a\xd3\xe2\x9e\xf0\x44\x51\x46\x19\xed\x0d\
-\xc7\xe0\x8d\x8e\xf7\xf1\xa8\x53\x5c\x21\x09\xbe\x50\x6f\xd5\xcb\
-\xc7\xe3\xa6\xa7\x3e\x5a\x31\xb3\x50\xb0\xe6\x57\xc4\xd9\xe3\x12\
-\xb0\x6d\x2b\x17\xfa\x8d\xf7\xea\x6c\x33\x90\xbc\x38\x84\xbd\xa2\
-\xe0\x3b\x8f\xc3\xb6\xee\xbe\xa8\xda\x67\x12\x82\xbc\xe8\x59\x96\
-\x46\x58\x49\x68\xfc\x9d\xaf\x59\x4f\xd4\x03\xc8\x21\x2d\x00\x9f\
-\x9a\x25\x20\x8f\x83\x74\xc1\x64\xfb\x57\x92\x02\x94\x3a\x4d\x79\
-\x58\x55\x80\x8b\x23\x1e\xf1\x68\xf8\x6b\x97\xa7\x3c\x36\x58\x50\
-\x56\xd8\x6c\x81\x10\x29\x5c\x65\x6d\x7a\x31\xcb\x2f\xbc\x17\x5a\
-\xe2\xad\xa0\xb5\x78\x86\xe1\xa0\xa5\x76\x2f\x5c\x6a\x73\x08\x6f\
-\x8c\x79\x83\xde\xe3\x3f\xe2\x61\xc7\xa0\xd7\xc3\x17\xbd\x09\x5a\
-\x31\xb7\x38\x83\x86\xbe\x62\x56\x2e\x4e\x1e\xf5\x37\xea\x36\xe9\
-\xc8\xde\xf9\x2f\x69\x93\xb0\x4e\x0a\x40\x5e\xb2\x84\xd3\x20\xab\
-\x88\x30\xee\x10\xca\xac\x7d\x54\x36\x80\x14\x9a\xfd\x22\x05\xa8\
-\xb5\xe8\x14\x32\xdd\x5d\x59\x11\x4a\x08\xe7\x09\xd3\xe8\x5d\xf2\
-\x9d\xae\x5d\x4a\x3f\xc0\xf4\x3b\x50\x52\xa3\x00\x14\x6f\x26\x4d\
-\x30\xc1\xfe\xf5\xa7\xfe\x80\xa1\xe9\x92\xe1\xa7\xb5\x78\xf8\x7e\
-\xd3\xb2\xe7\xb9\x1b\x96\xbd\x78\x66\xbc\xb0\xec\x3d\x1b\x3e\x45\
-\xae\x91\x05\xbd\xd0\xe2\x1a\x46\x1c\x97\x10\xa2\xe8\x7b\xd0\x53\
-\x6b\x8d\x6e\x96\xeb\xf1\xd0\xc0\x20\x3c\xe4\x74\x1d\x8f\xd9\x6d\
-\x29\xe2\x39\x4c\x49\xf5\x1b\xec\x70\x6f\xd1\x8c\x5d\x89\xdb\xb6\
-\x79\xe7\x78\x79\x46\xe0\x86\xf5\x37\xb1\xad\xb9\x2d\x8a\x47\x29\
-\xb7\x10\xfa\x10\xb8\x75\xb2\x23\x48\x3c\x18\x13\x00\x8d\x8d\x11\
-\x56\xc5\x22\x78\x21\xb6\xba\xfc\x21\x82\x0a\xc0\x62\x71\x2c\x86\
-\x2f\x33\xfd\x28\xc2\x5c\x62\xbb\xbd\x0c\x82\x2e\x31\x9e\x11\x59\
-\x9f\x25\x6e\x0f\x53\x21\x9a\xfe\x87\xe3\xfc\x54\x2a\x4c\xbc\x04\
-\x85\x9f\x15\x40\xd4\x3a\x60\x5e\x09\x26\x3a\xa6\xb1\x12\xbc\x37\
-\xba\xf6\x8a\x15\xe0\xf8\xc4\x78\xde\x1f\xcb\x5e\xa4\x9b\x2c\x2c\
-\x49\x93\x0c\x9f\xa2\xa0\xe7\x40\x2f\x36\x5d\x7c\x4d\x6d\xc2\x6f\
-\x84\x60\xd1\x68\xa3\x0a\xe0\xf8\xe6\x4d\x71\x6f\x47\x07\xbc\xd5\
-\xdd\x09\x53\xed\x74\x05\xe8\x20\x87\x94\x8f\x4e\xb2\x61\x27\xc6\
-\x37\xbb\xe3\xe6\xba\x9b\xef\xe6\xe6\x85\xe3\xc7\xff\x89\xfd\xfa\
-\xb9\xa2\xac\xac\x3c\xfd\x41\x7e\xb4\x0e\x53\xa7\x7b\x02\xc5\x95\
-\x30\x3e\x74\x18\x85\xb0\xbb\x40\x14\x3e\x78\x0a\x72\xfb\x45\x61\
-\xb1\xa4\x00\x94\xbe\x61\xfc\x22\xcd\x75\xa7\xb2\x80\x37\x49\x08\
-\x3b\xda\x22\x6c\xe1\x22\x4c\x21\x12\x58\x13\xd4\x2a\xc6\xe6\xe9\
-\x64\x8d\xf5\xb4\xb8\xa5\x77\x04\xc1\x84\x30\x9e\xb0\x9b\x70\x99\
-\xb0\x96\xe0\x2c\xee\x48\xd1\x20\xf8\xb3\x03\x41\xbc\xfd\x5f\x08\
-\xbf\x42\x02\xce\x4b\xc1\x38\xc7\x0c\x51\x3d\xa0\xd3\x36\x7f\x1a\
-\xfe\x8b\xd6\x81\x69\xa4\xe8\xcf\x6c\xbf\x20\xef\x65\xd5\xf0\xb5\
-\xa7\x24\xa3\x7c\x93\xce\xf4\x1a\x0b\x09\xf7\x69\xfd\xa6\x21\xa9\
-\x73\xb8\x3a\x01\xde\xdb\x4c\xc4\x3b\x08\xc8\x06\xce\x22\x70\x90\
-\x7b\xee\x33\x9a\x1b\xba\x6a\x7c\xdb\xc8\xa9\x6e\x1e\x48\xc3\xb7\
-\xb3\x73\x43\x15\x15\x51\x17\x33\xed\xfc\x99\xf0\x23\x2b\xc8\xff\
-\x9f\xdd\xb0\x7e\xa2\xf0\x49\xf0\xdc\xbd\x25\x02\xcd\x3d\xa5\xf1\
-\x1d\x0e\x09\xaf\xad\x7f\x9f\xeb\xb3\x29\x37\xc7\xe7\x4f\xc2\x8e\
-\xdc\xf7\xbc\x5d\xf9\xd9\xbe\xfb\x3e\x64\xfb\xed\xfd\xf0\xd6\xd7\
-\x32\xfa\xde\x35\x48\x9c\x2d\x80\x08\xd7\x7c\xf0\x34\x7f\x0f\x1e\
-\x1c\xf2\xfd\x24\xc4\xd5\x84\x11\x04\x53\x82\xbc\x28\xd4\xec\xea\
-\x26\x70\x70\xb9\x5c\x54\x56\x56\xc3\x46\x8d\x9a\xa2\x9e\x5e\x6b\
-\x6c\xdd\xba\x03\x5a\x5a\xf5\xc1\x3f\x16\xb7\xfd\x3a\x01\x44\x8f\
-\x03\xce\x0b\xc1\x78\xa7\xcc\x32\x87\xe9\xa2\x8a\xe0\x81\xe9\x09\
-\xbc\x8e\xb3\x52\xd2\xd9\xd0\x75\xa6\xa5\xa2\x7a\xcf\x6d\x28\xa7\
-\xd5\xa1\xe2\xba\xea\xca\x80\x13\x1c\x00\x6f\x6e\x00\x2c\xba\x01\
-\x58\x7a\xbb\x9c\x9c\xd3\xf0\x4c\x22\xfc\xdc\xb8\x61\x32\xbc\xfd\
-\xae\x50\x30\x63\x84\xc3\x1b\x1a\xbc\xb3\xf3\x32\xd4\xd2\x6a\xc1\
-\x36\xeb\xdc\x68\x5d\xe9\xa7\x18\x0b\x20\x2f\x17\xfa\xc1\x18\x8d\
-\x3a\x8a\xbd\x97\x3d\x2f\x1a\xbc\x15\x4b\x09\x48\x19\xb9\x4f\x18\
-\x71\xa4\xe0\x4d\xe6\x19\x61\x06\xb2\x1c\xe3\xbf\x2e\xe8\x16\x1e\
-\x1c\x6b\x19\x1e\x8a\xc6\x31\xd7\x78\x90\x38\x0b\x45\x3c\x99\x91\
-\x0a\x67\xdb\xc5\x81\x17\x23\x14\x89\x40\xd9\x07\x28\xb7\x82\x8b\
-\x4d\x5c\x95\xb1\xfd\xb4\x26\xd8\x6b\xbd\xbe\x70\xd8\x49\xb3\xd2\
-\x95\x07\x37\xe1\xc9\x93\x8f\xfe\xc1\xdc\xdf\xef\x65\x8d\x5b\x1e\
-\x86\x2b\x96\x8e\xc5\xb7\xf7\x81\xff\x35\x12\xe4\xde\xe7\x3c\x13\
-\x8c\x1d\x18\x93\xd0\x36\x24\xa9\xcf\xbc\x77\x91\xda\x0b\x32\x51\
-\x65\xfa\x29\x94\xd7\xed\x4e\x1e\x4f\xe5\x13\x44\x54\x95\x00\x5d\
-\xec\x00\xaf\xae\x03\x2c\xbc\xfe\x29\xf4\x4a\x9c\x67\x82\x49\xf0\
-\x65\x59\x03\x65\x7d\x8f\x4d\x81\xec\x3d\x93\x98\x82\x09\x23\x26\
-\x08\xc6\x8c\xd9\x8c\x2d\x5b\xd2\x92\x83\xa1\x2d\x22\x77\x5a\x62\
-\xfd\x54\x83\x41\xf4\x99\x4d\x6b\xae\x8a\x6a\xba\x68\xbf\xf4\x39\
-\xb2\xe1\xb3\x0c\xdc\x54\xf4\x61\xd1\x9d\xd8\x34\x1a\xfe\xb6\x94\
-\x48\x61\xdb\x2b\x17\x91\x86\xcf\xa2\x7b\x6f\x47\xbc\x6c\xc2\xf4\
-\x97\xca\x4f\x5c\x1f\x37\x79\xe2\x12\x6d\x1e\xda\x27\xca\xfa\xa6\
-\xee\x8b\x7e\xde\x46\x85\xa3\x03\xcd\xd0\x35\xcc\x12\xdd\xc2\x2b\
-\xe3\x1a\x6a\xc5\xdf\x72\xce\x2b\x53\x32\xfc\xcd\x7b\xc2\x22\x68\
-\xf8\x2c\x8b\xd7\x79\x66\xbd\x7b\x20\x93\xfe\x45\x01\xfc\x98\x92\
-\xfc\x20\xe5\x80\x81\xee\x81\x17\x65\x02\x62\xa3\x21\x2a\x0a\x99\
-\x07\x0f\xb0\x37\xf9\xf8\xfa\xc8\x00\x2e\xe9\x08\x78\x71\x26\xe0\
-\xf5\x79\x80\xb7\x17\x41\x49\xc4\x06\x78\x99\xe1\x0e\x21\xef\x0f\
-\xc1\x43\x42\x60\xee\x61\xc2\x51\x08\xcc\xda\xc8\x5c\xb9\x34\x81\
-\x49\xf2\x72\x03\x5c\x37\x4a\xb6\x64\xdc\xc8\x05\x68\x65\x35\x00\
-\x65\x64\x2a\x9e\xf3\xed\x7e\xda\xd1\x40\xf1\xb3\xf8\x8d\xa2\x5a\
-\x33\x34\xe8\x3a\x0b\x5b\x74\x5f\x80\xc6\x76\x2b\xb1\x8d\xe3\x06\
-\x34\x1d\xb0\x0d\xcd\x9d\xf7\xa2\xdd\xcc\x95\xa8\x37\xd5\x15\x35\
-\x47\x0c\x47\x55\x5b\x1b\xd4\xed\x6d\x8c\x63\x3d\xb4\x71\xe6\x05\
-\x4d\x74\xbb\xa8\xfd\xe1\xb7\xc0\x16\xb1\xf3\x1e\x99\x06\xcd\x8c\
-\x6a\x17\x33\x2d\xd2\xe2\xc5\xd4\x70\xcb\xdc\xaa\xc1\x57\xa6\x7d\
-\xf1\xae\xbf\x8f\x24\xd0\xf0\xbd\x0e\xc5\x04\x8f\x5f\xf1\x29\x7c\
-\x96\x99\x6b\xee\xe4\xbf\xba\xab\x11\x47\x42\x2e\xcc\x8b\x90\x4b\
-\xfe\x98\xa4\x14\x5c\x98\xde\x80\x57\x9c\xaf\xe5\xc7\x2f\xd1\x89\
-\x2c\x15\xea\xbe\x2e\xc5\x66\x82\x37\x45\xda\x41\xbd\xa2\xa6\x05\
-\x40\x4c\x0c\x52\x34\x3d\x3c\x8a\x4e\x93\x8f\xef\x73\x5c\x56\x81\
-\x17\x7e\x66\xc0\x8b\x71\x84\xb0\x27\x83\xa1\xd0\xcf\x19\x42\x68\
-\xf8\xf3\x07\xab\x62\x4f\xdb\x91\xa8\xa4\xa4\xce\xce\x2c\x1a\xf9\
-\xaf\x18\x0e\xa6\x03\x16\x6c\x53\x8c\xe0\x4d\x7b\xbd\x24\xfe\xcc\
-\x7e\x18\x67\xc5\x1d\x23\xb4\x9d\xfc\x17\xc3\x40\x5e\x37\x67\x40\
-\xaf\x28\xc0\xfb\xf8\x4f\xee\x0a\x99\x92\xbf\x8b\xe5\xd3\x4f\x17\
-\x28\x3f\x39\xf4\xae\x41\xd8\xee\xf4\x26\xfe\x1b\x53\x9a\xf3\x56\
-\x26\x19\xf1\x16\xc6\xb5\x09\x98\x1b\x63\x11\xb4\xfb\xe2\x5f\xb7\
-\x7f\x59\x12\x14\x3a\x70\x61\x40\x80\xdd\x5c\x3f\x5e\x97\xd9\xde\
-\x3c\xd3\xe9\xb7\x02\xf4\xa7\x5c\x0f\x6f\x38\xe1\x52\x62\x83\x91\
-\x57\xef\xbe\x2b\x68\xf5\x88\x04\x8d\x35\x90\xbb\x3b\x86\x9b\xb5\
-\x3e\x8c\xc1\x7a\x31\xbe\x31\x54\x80\x11\xae\xae\xe1\x55\x03\x3f\
-\xcb\x85\x8f\x77\x74\x21\x24\xcc\x06\x78\xf1\x83\x20\x85\x84\x8e\
-\x92\x78\xf6\x02\x81\x4b\x2f\x0d\x6c\xa4\xa9\xc7\x36\xeb\x68\x25\
-\x56\xe9\x5f\x35\x1f\x40\xdc\x2c\x72\xa8\xd2\xf7\xdd\x91\x36\xd3\
-\x6a\xf8\xfa\x06\x74\x84\x8d\x36\xe1\xac\x07\x01\x7a\x44\x54\x2f\
-\xc2\x03\x84\xe2\x50\x52\x53\x48\x44\x78\xf8\x06\xc1\xa7\x10\x21\
-\xb8\x90\x2f\x9f\xb0\xe0\xfc\x6a\x1f\x9d\x8d\xf7\x6f\xc2\xc8\xd3\
-\x58\x2d\xc3\xcf\x47\x42\xb7\x50\x81\xae\xf3\xcd\xcc\x62\xa1\x5e\
-\x56\x75\x02\xf8\xa4\xa9\xf9\x6f\x08\x03\xa4\xcc\x0a\x33\x7e\x4e\
-\x04\x28\x76\xd7\xd4\xcc\xa5\xa1\x5f\x52\x81\x67\xfe\xe6\xf0\x20\
-\xd6\x11\x42\xe9\x5d\x4e\xe0\xc7\x3b\x43\x46\xf4\x00\x48\xf2\xb5\
-\x87\xe4\xd3\x5d\xe1\xfd\x36\x0b\xf2\x7d\x2d\x01\xad\x1a\x71\x59\
-\xd1\xaf\xd4\x96\x59\x44\x75\x69\x30\xa7\x05\xe1\x22\xfd\x00\x3b\
-\xf7\x07\xdc\x43\x2a\x5a\x97\x7d\x00\x03\x23\xa0\xa4\xb0\x98\x8e\
-\xc0\x41\x99\xe4\xb7\xf0\x12\xad\xa3\x7a\x6c\xbd\x9a\x62\xbd\xf1\
-\x56\x1e\x73\x3e\x39\xa5\x7a\x01\xce\x64\x82\x6d\xd0\x1b\xe8\x1a\
-\x86\x14\xfb\x79\xfb\x63\x49\xe0\x7c\xc9\xf0\xf3\x4a\x75\x22\xd9\
-\xf0\x59\xfa\xde\x72\xbb\x7a\x51\x1b\x6e\xdd\xb4\x82\xbb\xb7\xac\
-\x21\xf0\x82\x15\x3c\x3a\x6a\x06\x19\x07\xdb\x40\xd1\x01\x63\xc0\
-\xfd\x84\xe5\xcd\x01\xfb\x37\x00\xd4\x95\xaf\xd4\x32\xa1\x4d\xd1\
-\x3e\xd2\x19\x41\xff\x9d\x08\xd6\x84\x4b\xea\xea\xc0\x0f\x0a\xfa\
-\xe7\x97\xe4\x16\xd6\xcf\x99\x74\x64\x4f\x40\xd7\x4d\x37\x91\x62\
-\xbe\xd7\x2f\x1c\xee\x65\xd0\xb0\x73\xab\x08\x50\x06\xbd\x7d\xa2\
-\xd9\xf0\x59\x36\x9d\x9a\xe5\x2b\x21\x40\xa1\xc7\x23\xd9\x97\x6c\
-\xf0\x0b\xbc\x8c\xe2\x06\x19\xac\x0c\x18\x2c\xbf\xde\x9f\x06\x2d\
-\x89\x3b\xb9\xc3\x67\xe9\x00\xda\xa8\x92\x16\x81\x4c\xa5\xd0\xe9\
-\x80\xcd\x49\x3a\xe8\x25\x9d\x12\xf6\x7d\x45\x68\xa4\xac\x0c\xaf\
-\xee\xdd\xfb\xf4\xd7\x67\x43\x07\x05\xd9\x6c\xbe\xfe\x96\x0d\x9f\
-\xd2\xe0\xdc\x93\xd7\x22\x01\x26\x9c\x8f\xaa\x24\x40\xff\x9b\x3e\
-\x55\xc3\x67\x09\x4b\xec\xee\x4f\x05\x08\x7b\xdb\x80\xb7\x3e\x98\
-\x53\x3a\x7d\x6e\xaf\x87\x7d\x95\xff\x7c\xd4\x07\xf6\x21\xc5\x41\
-\x66\x51\x1a\x0d\x7d\xab\x21\x69\xfa\x35\x06\x34\x27\xed\x7e\x59\
-\xa6\x22\xf0\x5c\x82\x0f\x61\x1b\x61\x6c\x6d\x9f\x35\x5c\xe7\x67\
-\xf8\xc8\x93\x22\x76\xa7\x97\xa6\x70\xf0\x9e\xc3\x51\x92\xc1\x53\
-\xac\x37\xdf\xca\x65\xee\xa6\xa3\x48\x80\x05\x77\x78\x9f\x9e\xfb\
-\x17\x42\x49\xd0\xc2\x9a\x04\x90\xef\x11\x58\x1c\xff\xdc\xe2\xbe\
-\x4b\xdf\xf1\x3e\xf6\x9c\x3d\x19\x6c\xf0\x2c\xa6\xdc\x5e\xa8\xa7\
-\x00\xc8\x94\x07\xfe\x4c\xdc\xfb\x48\xbb\xb9\x0d\x7e\xf4\xb0\xb6\
-\x54\x80\xca\x02\x4c\x94\x51\x50\xc4\xf6\x73\xfe\xc4\xaa\xe1\x53\
-\x4c\x3c\x02\x9e\x88\xc2\xa7\x6c\x0f\x7d\x28\x16\x20\x0d\x6c\x42\
-\xde\x57\x13\xfc\x47\x85\x2e\x01\x61\x7a\xed\x2f\x3d\xe8\xd9\x6a\
-\x5b\xdc\xd0\xa6\xab\x5f\xf6\x01\xf7\x5c\xc9\xe0\x7b\xc3\x4e\xd4\
-\x02\x4b\xf6\x4e\x4f\xa1\xd7\xff\x9e\x03\x33\x52\x01\xbe\x5d\x80\
-\x4d\x1a\x46\x6d\x70\x52\x50\x2a\x0e\x38\x19\x14\x42\x42\xff\x20\
-\x29\x80\xfa\xc5\xa4\xdc\x0a\x01\xce\x3e\x7b\x41\xc2\xe7\x43\x2f\
-\xbf\xc7\xe2\xc0\xf9\xb2\xd6\x41\xd1\x3a\xed\xaf\xde\xef\xda\x7a\
-\x6f\xf8\x30\xfd\x95\x45\x23\xf4\x97\xa3\x24\x83\x1a\x6c\x08\x92\
-\x14\xa0\x09\x54\x0c\xc7\x7a\xd0\x29\x66\xd2\xa5\x61\xff\x7b\x01\
-\x22\xf4\x7b\xf5\x17\x09\x40\x19\x73\x3f\xf9\x79\xb7\xcd\xb7\x12\
-\xc5\xc5\x7f\x36\x73\x4f\x5c\xfc\x53\xee\x66\x94\xc9\x0d\xb8\x7a\
-\xb9\x91\xd5\xad\x7b\x1d\x4c\x3c\x03\x9c\x0d\x57\xbf\xaf\x1a\x78\
-\x75\x38\xca\xef\xf0\xa3\xe1\x77\x83\x35\xa4\xc8\x17\x75\xfb\xae\
-\x94\x2e\x0f\xaf\x25\x6b\xf6\x09\x65\x6d\x27\xcc\xae\x10\x80\x32\
-\xf1\xe1\xab\xa2\xde\x1e\xbe\xbe\x66\xfb\x7c\x83\xc8\xf3\x3f\x53\
-\xf5\x68\x9c\xaf\xd9\xf4\x4b\xbc\x41\x6d\xff\x48\xfd\x9a\xc0\xab\
-\x32\x5c\x6f\x45\xa1\x0d\xb3\xb6\xb4\x29\xd8\xb0\x13\x31\xe4\xa4\
-\x02\xd4\x0e\x01\x3a\xd0\xe2\xd8\x76\xd5\x8e\x4a\x02\x50\x5c\x1e\
-\xa6\xc4\x1b\xfd\x7a\xe1\x82\x8b\xe9\x2a\xff\xb1\x6d\x56\xf8\x8d\
-\x36\x5e\xc6\x1b\x65\x44\x59\xee\x3f\xd2\x60\x45\xe8\x2f\x06\xcb\
-\x1f\x8f\xd0\x5f\xf6\x92\x04\x9c\xf7\x35\x12\x74\x6b\xe0\x82\x1c\
-\x90\xa5\x17\x5d\x22\xdd\x20\xa2\xf6\x08\xe0\x4a\x05\xe8\x7f\xe0\
-\x8a\x64\xf8\x02\xbb\x7b\xb1\x3e\x9c\xc3\xe7\xf8\x30\xe7\x52\x54\
-\xc3\xf1\x27\x92\x27\x59\xae\x4c\x99\xd2\x7e\x05\x7e\x06\xfe\xa4\
-\x76\x2b\x32\x27\xb6\x5d\x91\x34\xc1\x74\x79\xd4\x78\x93\x15\x81\
-\x63\x5b\x2f\xf7\x1d\x63\xbc\xc2\x67\xa4\xd1\x72\x1f\x6b\xad\xd1\
-\x69\xb2\x1c\x79\xb6\xeb\xb6\xa1\x54\x80\xda\x23\x00\x9d\x40\x8a\
-\x63\x6e\xc7\x8a\xc2\x9f\x10\xf8\xea\x75\xd3\x73\x77\x62\xe0\xd0\
-\x59\x14\x31\xfb\x72\x02\xcc\xb8\x8c\x72\x6e\x17\x72\x47\x76\x59\
-\x13\xfa\x05\x09\x24\x29\x1d\xd5\x6e\x75\x58\x3b\x83\x9d\x01\xb2\
-\x8d\x8f\x15\x80\xa2\x93\x50\x5c\xf1\x3b\x26\xdd\x22\xa6\x76\x09\
-\xb0\x55\xbe\xbe\x9a\x28\x7c\xe7\x07\x4f\x82\xe4\x8e\x5e\xc8\xab\
-\x08\x9f\x32\xe3\xca\x6b\x2a\x40\x39\x97\x84\xbd\xfb\x6c\xf7\x21\
-\xe1\x0a\x6b\x08\x5d\x30\xb6\xed\xef\x51\x1d\x8c\xb6\xfb\x29\x68\
-\x1d\xcd\x06\xcd\xe3\x58\x81\x7c\x45\xb3\xcf\x46\x2a\x40\xed\x12\
-\x60\x87\x9a\xb1\x29\x1a\x6e\xf2\xc0\x4a\xc1\xb3\x4c\xbf\x94\xff\
-\x49\x80\x72\x8c\x46\x1c\xa2\x25\x41\x2e\x1b\xfc\xb8\x76\xab\x1e\
-\x5b\x1b\x6f\xe5\x29\x69\x1d\xce\xac\x14\xba\x24\x32\xba\xec\x28\
-\x25\x23\x15\xa0\xf6\x84\xdf\x5d\xb4\x79\x53\x27\x13\x04\xa7\x65\
-\x08\xbb\x8f\xa5\x57\x0a\xff\xe0\xd9\xb2\xaa\xe1\xb3\x68\x8c\x39\
-\xf1\xd0\xc6\x78\xab\x8f\x6a\x93\xc3\xaf\x6a\x0c\x5d\x12\x46\x81\
-\x5e\x70\xa7\x74\x97\xb0\xda\x13\x3e\x9d\xdf\xff\x1e\xe4\x49\xad\
-\x7c\x8e\x1d\x42\xa7\xf9\xaf\xc1\xca\x4b\x00\x53\x8f\xf9\xc3\x81\
-\x33\xaf\x45\x02\x78\x9d\xcb\xae\x14\xfc\xb4\xcb\x2f\x60\xe0\x79\
-\x1e\x18\x9d\x7c\xf6\x55\xa1\xb3\x34\x70\x67\x8b\xff\x61\x52\x01\
-\x6a\x8f\x00\x83\x45\xa1\x74\x32\x40\x58\xe0\x88\x30\x75\x54\x12\
-\x58\x7a\xa1\x88\x8e\x5e\x25\xf0\xdb\x5f\x3c\xd8\x75\x2e\x86\x3c\
-\x02\xd2\x60\x18\x09\xbd\xcd\xc9\x27\xdf\x14\xba\x24\xf5\x67\xb0\
-\xa3\x79\x9a\x52\x01\x6a\x8f\x00\xbb\x80\xcb\x21\xcf\xf8\x9e\xe5\
-\x02\x50\x3a\xed\x8e\xae\x90\x80\xd2\xc1\x33\x48\xad\xe1\xb6\x6b\
-\x8a\x1a\xfb\x83\x48\x90\x1f\xfe\x63\x01\x64\x8d\xe9\x05\x4f\x4a\
-\x77\x0a\xad\x5d\x02\x3c\x06\xb3\xa6\x9f\xc2\xa7\x8c\x9e\x11\x54\
-\x49\x00\xf3\x6d\xbe\xd0\x72\x0d\xea\x68\x2e\x78\x68\xcd\x4c\xcb\
-\x32\x52\x5c\x19\x5c\x4f\xdd\x33\x80\x84\x9a\xfb\xd5\xe1\xab\x6f\
-\x62\x8b\xff\x8e\x52\x01\x6a\x59\xef\x1f\x8c\x53\x25\xcf\xff\x9e\
-\xf9\x9f\x24\xe8\x5b\x46\x82\x4f\xab\x10\xc0\x78\xc3\x0b\x2a\x00\
-\x45\x51\x7f\x55\x6a\x17\x8e\xdb\x23\x1b\x98\x82\xdd\xc0\x95\xdf\
-\x4a\x61\x69\xb8\x8a\xba\xbb\x1f\x09\xf9\xed\x67\x05\x50\xb4\xa3\
-\x17\x8c\x97\xee\x15\x5c\xbb\x04\x38\x06\xaa\x44\x00\x4f\x82\xbb\
-\xcc\x47\x58\xd6\x2d\xa0\x42\x82\x7e\x2b\x7c\xca\x05\xf0\x7c\xc5\
-\x86\xcf\xc2\x18\xad\x29\x6d\xad\x3c\xdb\x87\x48\x20\xa0\x22\x88\
-\x11\x98\xc8\x2d\x89\x56\x53\xdb\xc3\x63\x34\x8f\xa5\x55\x0a\xbf\
-\xe1\x41\x52\xfb\x57\xa4\x17\x9c\x23\x15\xa0\xf6\x84\xaf\x44\xf8\
-\x08\xb6\xe2\x05\x21\x2c\x9b\xf4\x1e\x12\x01\xf2\xe0\xd7\x41\xd9\
-\x44\x80\x22\xb0\xd8\xe3\x5b\x55\x00\x16\x0d\x9d\x25\xd1\xa4\x14\
-\x48\x97\x90\xa0\x02\x33\xd9\x05\x71\xea\xaa\xbb\x78\x8c\xc6\x4e\
-\x3e\xa8\x4c\xa2\x17\x2c\x20\xa8\x49\x05\xa8\x6d\xb5\xff\x39\x55\
-\x04\xa0\xec\xad\x97\x0a\x8b\xed\x62\xa1\xc7\x66\x7f\x30\xd9\x12\
-\x52\x93\x00\x14\x19\xc3\xd5\xef\x3b\xca\x4e\x0d\xae\x4e\x02\x16\
-\x65\x68\x8c\x3f\xcb\x5e\xc0\x3f\x93\x00\x5d\x45\x6b\x08\xa6\x56\
-\x23\x00\xc5\x93\x29\xe3\xce\xed\x7d\x9d\xd3\x6a\xed\xeb\xcf\x09\
-\xc0\xa2\xaf\x31\xd7\x8f\x84\x5d\x58\x35\xfc\x8e\x30\x0a\x45\x25\
-\xcd\x4f\xdc\xf4\xab\xcb\x75\x80\x67\xd0\xaa\x5a\x01\x3e\x5a\x9c\
-\x52\xe1\x9d\x08\x36\x7f\xf7\x21\x5d\xe1\x65\x7c\x58\xc3\x94\xcd\
-\x3b\xba\x05\xd8\x8e\x9c\xc8\x53\x6d\xbf\xec\x11\x09\xbc\xb8\x3a\
-\x09\x94\x9b\x2f\x7f\x6a\xcd\xb8\x26\x49\x0a\x60\x00\x9d\xbf\xeb\
-\x4e\x5f\x52\x01\xbe\xaf\x00\x0f\x44\x8f\x81\xf5\x50\x26\x0e\xbe\
-\xc8\xfc\xb4\x0a\xef\x78\x90\xf9\x9b\x1b\xe1\x96\x48\x89\x4f\x6a\
-\xea\x8b\x6f\xc8\x97\x4b\xc0\xcf\xe0\xf2\x7d\xef\x36\x8f\x9f\xb7\
-\xca\xc1\xd7\xbc\xef\xf4\x00\x79\xd3\x55\xcf\x88\x00\x42\x2a\x01\
-\xc7\x68\x75\x91\xb9\xe2\x4c\x5f\x56\x80\xfa\xe5\xc5\xbf\x41\x0d\
-\x13\x50\xe8\x32\x73\x2d\xf1\x76\x33\x2d\xc4\x4b\xde\xe9\x6a\xa7\
-\x5e\x84\xe1\xe2\xbd\x0b\x56\x8a\x17\x7e\x3c\x15\xcf\x0e\x3e\x21\
-\xde\x0d\x65\xad\x78\x51\x0c\x5d\xc2\x3e\x44\xbc\xd1\x83\xd5\xff\
-\x7a\x8c\xa1\x2e\x85\x6f\x4a\x67\x00\x81\x31\x09\x7e\x3b\x60\xeb\
-\x03\x4a\xe1\x47\x03\xcd\x32\xd9\xe0\x59\xbc\xa3\xcc\x43\xaa\x0a\
-\x50\x1d\x39\xcf\x14\xf2\xce\x9d\x6d\x13\x31\x76\xf6\x50\x9f\x66\
-\x36\xf3\x42\x74\x1a\x8f\xce\x6d\xcf\x0c\xa3\x17\x12\x8a\x17\x70\
-\x44\x88\x43\x7c\x2b\xee\x0d\xc4\xaf\x45\x56\x59\x09\x1b\xb4\x6f\
-\x8b\x6d\x46\x4f\x47\xdb\xa1\xd3\xd0\xa2\xa7\x33\x1a\xb6\xed\x8a\
-\x8d\x74\x5b\xa0\x82\x52\xfd\xaa\x5f\xef\x4b\xd7\x4d\x4a\x05\xf8\
-\x7c\xf8\xa2\x0d\x1e\xb8\x26\x0c\xea\x2d\x55\x44\xf7\x7b\x6d\xf2\
-\xab\x06\x2f\xc1\x07\x41\x16\x53\xf2\x35\x12\x88\xc8\x84\x14\x7c\
-\xc6\x04\xf8\xb8\xcb\xf0\x17\x2e\x6b\x8d\x93\xcf\x0d\xc7\x89\x67\
-\x86\xe1\x84\x53\x43\x71\xdc\xf1\x21\xe8\x72\x6c\x30\x8e\x3d\x3c\
-\x08\x47\x1f\x1c\x88\xa3\xf6\x0f\xc0\x5f\x3c\xfa\xe3\x88\x7d\x4e\
-\x38\x7c\x77\x5f\x1c\xba\xd3\x11\x87\x6c\x77\x40\xe7\xad\xf6\xe8\
-\xf0\x7b\x1f\x6c\xbb\x68\x16\xda\xfe\xe5\x89\x8e\xf7\xff\x46\xcd\
-\x33\xe7\x0a\x06\x5c\x0a\x2b\xf6\x0a\x47\xac\xca\xde\xc0\x22\xb4\
-\x58\xf8\x17\x82\xe9\x04\x04\x35\x03\xfa\x0b\x66\xfe\xaf\xb6\x9e\
-\xaf\x2b\x02\xf8\xa9\xb4\x51\xc0\x31\xbb\x9a\xe1\xce\x73\x6d\xf0\
-\x82\x9f\x45\xe9\x67\x04\xc0\x8c\x97\x6a\x91\xd5\x86\x9d\x05\x05\
-\xf8\x9a\x89\xc4\x58\xae\x0f\xde\x96\x0d\xc5\xa3\x0a\xd9\xb8\x5f\
-\x11\xf9\x5e\xf5\x9e\xfd\x7d\xd2\xbc\x6c\xc1\xb9\x01\x25\x8b\xc2\
-\xdd\xf0\x5b\x19\x7b\x73\x59\x92\xc3\x9d\x8b\xd9\x8e\xde\x57\x91\
-\xd2\x34\x24\x24\x12\x22\x9f\xe0\x9a\x88\x92\x77\xd5\x09\xe0\xf2\
-\xe0\x59\x02\x2c\x79\x16\x07\x83\x6e\xbe\x03\x7b\x2f\x04\xc5\x86\
-\xf4\x97\xec\x2b\x15\xa0\xfa\xf0\xe9\xd6\x2f\xd8\x7c\xa5\x16\x76\
-\x7a\x6e\x52\x41\xef\xa7\x26\x6f\x5d\x12\xcd\xa2\x97\xc6\xb5\xf5\
-\xdb\x17\xd3\x8e\x77\x26\xd2\x22\xec\x5a\x84\xe5\x2b\x22\x80\x20\
-\xf4\x91\x11\x8f\x04\x2e\xc4\x0c\x78\x8a\x49\x8c\x3f\x06\xc8\xf8\
-\xe1\x59\xf9\x64\x3c\xa0\x28\xa0\x81\x4b\xf2\xf1\x88\xfa\x43\x1e\
-\xaf\xeb\xc7\x7b\xbe\x36\x69\x8b\xc3\xdd\x84\xdf\x12\xfc\x82\xb0\
-\x99\xb9\x83\x79\x27\xfd\xd9\xe0\x29\x7a\x41\x41\x31\x34\x7c\x42\
-\x91\x67\x35\xe1\x2f\x0e\xcb\x4f\x80\xa0\xd0\x5c\x98\x23\xcc\x81\
-\xd9\xa5\x6f\xc1\xe1\x00\x82\xac\x32\xfd\x45\xb7\x49\x05\xa8\x5e\
-\x00\xba\x32\x18\xdb\xde\x6f\x51\x49\x80\x6a\xe0\xf7\x4e\x6e\x15\
-\xbb\x3c\x58\xcf\xfb\xe9\x85\x06\xa1\x78\x44\x21\xaf\x6a\xd8\x55\
-\xe0\xa7\x9e\xd7\xe3\xdd\xf7\xb5\x41\xca\x59\x7f\x47\xde\xb7\x84\
-\xef\x16\xb8\x26\xcc\xd1\xfb\xef\x0c\xc9\xf0\x0d\x03\x03\xe3\xc4\
-\xe1\xa3\x62\x44\x62\x42\xd5\xf0\x37\x86\x17\xa7\x71\x82\x82\x33\
-\x95\xce\x3e\x8a\xa2\x7d\x8c\x22\xfa\x9e\x10\x02\x47\x86\xfe\xa2\
-\x7b\xa5\x02\x54\x2f\xc0\x19\x86\xcb\xa0\xf9\x9d\xca\x02\x74\x7c\
-\x6e\x52\x6c\x9f\xd8\x2a\x7a\x71\xb0\x9e\x8f\xf7\x95\x86\x91\x45\
-\x87\x94\x3e\x7e\x21\xf0\x4f\x1c\xa8\x97\x19\x75\xd3\x3c\x86\x0d\
-\x9f\xb2\x31\x78\x4c\xfc\xd7\x04\x3f\x2f\x6c\x46\xde\x60\x9f\xe3\
-\x95\xee\x7a\x4a\xcb\x87\x01\x09\x6c\xf8\x14\xfd\x88\x94\x20\xc9\
-\xf0\x77\x86\x97\xe5\xc9\x06\x85\x24\x43\x50\x10\x26\xa4\xf4\x0d\
-\x4b\x7a\xe4\xec\xab\xb7\xfc\x75\x22\x8c\x89\x65\x2b\x83\x9e\x52\
-\x01\xaa\x17\x60\x8f\x68\xc3\xa7\xfa\x5c\xb4\xd8\xa3\x8d\x13\x8f\
-\x68\xe7\xfa\x5f\x6a\x18\x5d\x72\xb0\x5e\xd1\x57\x07\x2e\x01\xff\
-\xa0\x4a\x8c\xbf\x77\xe7\x2c\xc9\xf0\xef\xfa\xd9\xbe\xfc\x9a\xf0\
-\x5d\x83\xd6\x84\x3b\xde\xbf\x92\x5e\x35\xfc\xd6\x81\x7e\xc9\x24\
-\x74\xa1\xa4\x00\x76\x91\x59\x81\x6c\xf8\xee\xe1\x42\x7e\xfd\xa0\
-\xf0\x48\x1a\xbe\x6a\xf0\x83\x22\x4c\xb3\x2c\x26\xa0\x20\xd5\xb2\
-\x70\xc5\xfa\xbe\xa8\xd0\x40\xb4\xdf\x6f\xa9\x78\xc3\x08\x39\xa9\
-\x00\xd5\x6f\x9b\x5e\xd8\xc3\x1a\xf0\xdc\x06\xe6\x4d\xf6\x2e\x05\
-\x7f\x12\x66\xf1\xb7\x86\x9f\x73\xb2\x31\xcf\x9b\xd7\xad\x54\x32\
-\x7c\xca\xe9\xc0\xbe\x3e\x9f\x7d\xd6\x87\xce\xc8\x77\xe6\x1d\xf7\
-\xab\x1a\x3c\xc5\x24\x80\x97\x42\x02\x17\x48\x86\x4f\x99\x19\x51\
-\x10\xc7\x0a\xa0\x13\x1c\x13\x40\xc3\xa7\x2c\x7d\xbc\x26\x84\x86\
-\x2f\xc9\x8b\x60\x33\x1c\xda\x4f\x9d\x2d\x09\x62\xfe\x3f\xf7\x0a\
-\xaa\x4b\xfd\x00\xf7\xb8\x5c\xc0\xb4\x58\x10\x86\x9c\x86\xe0\xe3\
-\x33\x98\xb7\xc9\x9b\x64\x7d\x84\xfb\x15\x33\xbf\x22\xfc\x0f\xc9\
-\x57\x5a\x3e\xac\x1a\x3c\xcb\xfa\x90\x71\x49\x35\x3e\xeb\x83\x7e\
-\x8f\xe8\xeb\x7d\x25\xad\xba\xf0\xcd\xfd\x7d\x52\x49\xd8\xa5\x55\
-\xc3\xa7\x6c\x8f\x28\x2b\xa0\xe1\x5b\x86\x26\xf9\xb0\xe1\x53\x96\
-\x4f\x33\xf6\x7e\xb9\x59\xfb\x9a\xf0\x6e\xab\x20\x0c\x34\xf1\xc3\
-\xc7\xe6\x91\xfc\x64\x8b\x3c\x2a\x82\xcf\x85\x96\x68\xde\x5a\x91\
-\x9d\x85\xb4\xea\x7b\x37\x0d\xc5\xd3\xe9\x74\xea\xaa\x00\x4b\xe9\
-\x1d\x12\x78\xb3\xbc\x49\x97\x74\x1f\x7c\xe9\x86\x4b\x07\xa6\x02\
-\x3f\x68\x95\x4c\x00\xdf\x5d\xe1\x71\x75\xe1\x0b\x0f\xd6\x7b\x11\
-\x72\xa7\x7d\x72\x4d\xe1\xdf\xf5\xb5\x7d\x56\x5d\xf0\xf3\xc3\x67\
-\x14\x38\xf3\xfe\xaa\xf6\xae\xa7\xb4\xbf\xb6\x2b\x9d\x7b\xf7\x98\
-\x3f\x44\xc6\xa7\x55\x0d\x9f\x13\xf1\x24\x83\x86\x3f\x20\xe4\xb5\
-\xbf\x64\xf8\x1a\xb7\xce\xe7\xae\x19\x02\xc2\xdd\x7d\x74\x7c\x22\
-\x5c\x14\xee\x15\x1c\xd6\x7d\x8c\x37\x5a\x22\x45\x78\xc3\x28\xf7\
-\xd5\x09\xc3\x77\x01\xee\x7a\xb8\x75\xa6\x16\xb6\x6c\x2a\x87\xe2\
-\xce\x28\xd3\xef\xf0\xd9\xa9\x11\x22\xc5\x9d\x5c\x74\xc7\x55\xd5\
-\xba\x28\xc0\x1d\x83\xe6\x80\xc2\xac\x4f\xed\xfa\xf4\x10\xf0\xf3\
-\x9a\x0a\x42\x2a\x02\xe5\xfa\x7c\x4e\xec\xfb\x5d\x0a\x0f\x69\x0d\
-\x9f\x86\x5f\x74\x44\x35\x98\xf7\xc0\x3a\xbf\xa6\xf0\x29\xa7\x02\
-\xfa\xfd\xa3\xf8\x9f\x1a\xb4\x2a\x92\xdc\xf5\xaf\x6b\x0a\xbf\xf3\
-\x8d\xfd\x6f\xc0\xab\xef\x47\xf0\x72\x44\xf0\x72\x2a\x83\x4b\xeb\
-\x83\x20\x24\x3c\x9a\x15\x40\x2d\xf2\x69\x98\x5b\xd8\xbb\x08\x12\
-\x7a\xa9\xa4\x00\x43\x56\xf5\x8d\x20\x02\xe0\xba\xc1\x72\xa9\xbb\
-\x7b\x19\xe6\x06\x3b\xcb\x06\x26\xcd\x50\xf4\x2d\xbb\xd4\x22\x97\
-\x15\x81\xa5\xf8\x8a\x11\xf6\xb5\xaa\xd8\x33\x79\xda\x7f\xb9\x49\
-\xe7\xd5\x7a\x5a\xda\x68\x7f\xec\x0a\xfb\x98\x59\x5b\xd7\xea\x00\
-\xb4\xef\x5d\xb8\x6e\x89\x38\xfc\x34\x48\xc3\x27\xc0\xc3\x48\x48\
-\x4c\x3c\x09\x37\xf7\x4f\x85\x52\x56\x02\xca\x89\x59\x4c\x46\xd4\
-\x8e\xfa\x37\xef\xf3\x6c\xde\x7e\x2e\x7c\xca\xba\xb0\x71\xcf\x2b\
-\xee\xfa\xb0\xe9\x1f\x86\xfa\x1e\xf3\x25\x21\x0b\x6b\x0a\xdf\xe6\
-\xf6\x5f\xef\x18\xaf\xbe\xef\xcb\xc3\xaf\xc2\x09\xb7\x24\xf0\xbf\
-\xe3\xa7\x1d\x96\x78\x8b\x04\x9e\x2f\x19\x3e\x13\xf8\x10\x57\x0e\
-\x97\xcf\xa2\x02\x50\xb6\xda\x37\xe1\xed\xe8\xd9\xa2\xcc\x7f\x90\
-\x82\x5f\xd4\x30\x78\x9b\xb5\xb1\x61\x40\x55\x09\x3e\x5e\x6a\x85\
-\xc3\xac\x0d\xd8\xd0\x8e\xfe\x27\x87\x58\x93\xd7\x64\xfa\xfd\x03\
-\x2e\xf3\xd0\x2d\x0b\x51\xd5\xc0\x88\xfe\xe5\x8d\xba\x26\x40\x7f\
-\xfa\x4b\x5c\xdc\xc7\x60\xb6\x0f\xa4\x60\x38\x08\x09\x28\x26\xeb\
-\xce\x16\x08\x22\x12\x14\x4a\x4a\x40\xd9\x3f\x8d\x53\x74\x7e\x4f\
-\x33\xbf\x7b\xde\x5d\x13\xaa\x0b\xff\xb6\x6f\xf7\xc4\x8a\xbb\x3e\
-\x70\x55\x54\x5f\xef\xbf\x53\x6b\x0a\x9e\xd2\xe3\xce\xe9\x5c\xce\
-\xfe\x7e\x69\xd5\x86\x4f\x60\x3c\xec\xcb\x1a\x4d\x33\x0e\x30\x1c\
-\x62\x98\xac\xb7\x65\x6b\x68\xf3\x9d\x1e\x48\x69\x46\xff\xbb\xc3\
-\x1d\x7f\x99\x65\x1b\x4b\xc2\x2f\x13\x49\x30\x98\x93\x47\x04\xc8\
-\x26\xe0\xbd\x01\x4a\x3e\x51\x43\x01\x1f\x8d\x96\x89\x2a\x3c\xd1\
-\xfc\x99\xa4\x04\xd9\xa7\xac\x70\x56\xef\xa1\xa8\x28\x5b\x8f\xad\
-\x20\xb6\xff\x86\xcf\xad\x3e\x2d\xf2\x9b\x3b\x0c\x14\x85\x4f\x69\
-\xe9\xe2\x4a\xff\xc1\xa7\xae\x09\x20\xea\x0d\xfc\x63\xa3\x1d\x66\
-\xdd\xe3\xbe\x91\x08\x5f\xc4\x9b\xdb\xe0\x73\x6b\x13\x3c\x3e\x38\
-\x1d\x72\xaa\x4a\x50\x51\x2a\xac\xd1\x88\xba\x7d\xa3\x63\x30\x09\
-\xbe\x8c\x15\xe0\xaf\xc0\x01\x3e\x0b\xc2\x67\x92\xbb\xfe\xe8\x67\
-\xef\x7a\x8a\xdd\xdd\xf3\x1f\xb8\xfb\x07\x24\xd7\x14\xbe\xca\x72\
-\xab\x44\x43\xa7\x86\xa9\x2d\x1c\x35\x50\x8c\xd0\x60\xf6\x48\x6f\
-\x12\x7c\x3e\x0d\x9f\xc5\x7a\xe5\xac\xcc\x95\x43\xb9\xb4\x2e\x80\
-\x9b\x1c\x1b\xfa\x53\x01\x28\x57\x9d\xd4\x7c\x89\x04\x42\x42\xc9\
-\xf3\xb9\xca\x3e\x82\xab\x46\x85\xac\x04\x39\x67\x2c\x84\x2b\x9d\
-\x96\xa2\x89\x56\x3b\x14\x0d\x88\x01\xec\x24\xa8\x7c\xc5\xe7\xb6\
-\x96\x23\x23\x83\x23\x1e\x26\x60\xdf\x98\x8c\xcc\x46\x17\x1f\x16\
-\x28\xfc\xf6\x3b\xfd\x87\xb8\xba\x26\x00\x1d\x8a\x15\xae\xde\x3e\
-\x0e\xb7\x7a\x4d\x2c\x4b\xf1\xd1\xe1\x55\x29\x05\x0a\xfd\x76\x41\
-\xfa\xed\xcd\x90\x7c\x70\x26\x64\xd6\x24\x01\xe5\xf0\x7c\xc5\xd4\
-\xab\x67\x4d\x79\xe4\xf1\x90\xb3\x30\x68\x81\x8f\xe3\x83\x2b\xaf\
-\x68\xc0\x0e\x77\xaf\x7c\x74\xb8\x76\x3e\xd3\xfe\xf2\x99\xe7\x76\
-\x67\x8e\xc7\xd9\x1d\x3d\x14\xd9\x6b\xbf\x57\x70\xcf\xbd\x7b\xfd\
-\x7b\x6c\xdb\xce\x93\xdb\x3f\x38\xba\xba\xe0\xe5\x36\xdb\xe4\xeb\
-\xf6\x6b\xf4\x86\x0d\xde\xa0\xb7\xba\xd0\xb0\x8f\xba\x90\xfd\xb3\
-\xfe\x40\x83\x2c\xdd\xd5\x6b\x93\x25\x25\x68\xbd\x71\x03\x4e\xee\
-\xa3\x80\xbf\x3b\x33\xb8\xd1\x56\xff\x2d\x2b\xc1\x05\x27\xcd\x87\
-\x44\x00\x3e\x2d\x0d\xa2\x87\x33\xa9\xd9\x3b\xb5\x42\x59\x09\xf2\
-\xcf\xb5\x13\xac\xed\xbf\x01\xc7\x58\x4d\x45\x25\x39\x51\xd7\xf1\
-\x6b\x3a\xac\xfc\x99\xcf\x8b\x4a\x82\xda\xd6\x63\xd0\x72\xd7\x89\
-\x8f\xca\xa1\x19\xa5\x04\x94\x9b\x30\x87\x6d\x65\xc8\xd5\xb5\x4a\
-\xe0\xb3\x65\x1b\x47\xe3\x16\x2f\x37\x11\xd7\xce\xdb\x85\x0b\xc3\
-\x98\x4c\x56\x82\x7c\x5f\x08\x20\x8f\x02\x24\xbc\x3e\x3c\x0b\x52\
-\x6a\x12\xc0\xd3\x0d\xf2\x56\x4d\x82\x80\x4e\x2e\x72\xc1\x5d\xa6\
-\x2d\x88\xef\x3a\x75\x41\x0e\xa1\x8c\x80\x35\xd1\x74\xce\xf0\x7f\
-\x14\xfb\xdc\xdd\xbd\x51\xd3\x59\x57\xa8\xd7\x4b\x0d\xb5\x2c\x94\
-\x51\xad\xb9\x02\xca\xab\x70\x45\x67\x1b\x30\x1c\x40\x45\x0d\x19\
-\xd4\x68\xa1\x88\x3a\x1d\x55\xd0\xa0\x8f\x06\x36\x1e\x6c\x87\xcd\
-\x36\xef\x14\x09\x50\xbf\x57\x1f\xe4\x90\xaf\xeb\x63\x0a\x38\xa3\
-\x9b\x12\xb2\x02\x50\x4e\xd8\x6b\x45\x44\x0e\x85\x0f\x54\x02\x4a\
-\xdc\x04\xd9\xe0\x92\x33\xfa\x69\x54\x82\xc2\x4b\xe6\xa5\x1b\x06\
-\xae\x2f\x59\xe3\xb4\x1b\xcd\xb5\xad\xd8\xba\xc1\x55\xf6\xc0\x28\
-\x71\x91\x4f\x0f\xa5\xc8\xaa\x27\xd7\x0c\x5b\xb6\xde\x89\xcc\xaf\
-\xe1\xc2\xdd\x57\x03\x72\x56\xde\x7f\x94\xde\xe8\xef\xd0\x62\xb9\
-\x21\x2e\xec\xee\xea\x75\x4e\x80\x93\x0b\x56\x8f\xa8\x10\x80\xb2\
-\xe7\xd0\xb8\xec\xbc\x20\x95\x60\xb1\x04\xc2\x90\xfd\x10\x2f\x96\
-\xe0\xdd\xd1\x39\xf0\x44\x22\xf4\x82\xd5\x93\x21\xa0\xdb\x28\x6e\
-\x88\x8c\xb3\x4c\x31\x0c\x96\x41\x4a\x8b\xa1\xf6\xaf\xab\x0b\xdc\
-\xda\x6d\x3e\x5a\x0c\x9f\x80\x46\x3d\x1c\xb1\xf1\xb0\x5e\x08\x1e\
-\x0e\x08\xfb\xec\x11\xf6\xf4\x41\xd8\x65\x87\x32\x0b\x3b\xa0\x72\
-\x5b\x75\x94\x53\xe6\x4a\x8e\xeb\x17\x88\xb7\xbe\xa5\x5b\xdd\x9e\
-\x13\xef\x1e\x26\xfa\x37\x56\x08\x15\xa3\x26\xa8\xd4\xbe\x03\x5b\
-\x8c\xff\x45\x8f\xb1\x33\x6e\x02\xf8\x9b\x95\x4e\x25\x09\x0e\xf5\
-\xd6\x49\x88\x1a\x02\xd9\xac\x04\x84\x0f\xaf\x96\xa9\xf2\x84\xd7\
-\x8c\x4a\x4b\x2e\x99\xf1\xb7\x0d\xdd\xf2\x66\xbb\xf3\x51\x9c\xd4\
-\x79\x0e\x1a\x36\x34\x66\xa7\xb0\xd1\x93\x48\x72\x64\x38\xca\x68\
-\xd2\x64\x25\x76\xea\x1c\x82\xcc\xfc\xd8\x32\x58\xf1\x12\xbd\xee\
-\x86\x17\x12\x70\xcb\xd9\xbb\xd8\xb0\x45\x2b\xfa\xf5\x7b\xea\x62\
-\x33\xd0\x6b\xce\x32\xe7\x4a\x02\xb0\x04\x5f\xb7\xf0\x23\x02\x7c\
-\x2c\x09\x82\x68\xb1\x00\x78\x67\x33\x14\xac\x9c\x0b\xf7\xba\x4f\
-\xe0\x44\x28\x0c\x95\x29\x62\x43\x97\x44\x61\x80\x06\x5a\xbb\xce\
-\xc5\x76\xc3\xc6\x8b\xc2\x6e\x62\x62\x81\x2a\x8d\x9b\x20\x7d\x6e\
-\x56\x33\xd1\xa3\x4c\x7c\xe7\xd0\x49\x22\x61\xb4\x63\x4a\x7c\x8c\
-\xeb\x2c\x71\x1d\x85\xa9\xe1\x94\x92\xf6\xe2\x23\x5f\x27\x11\x42\
-\xc4\xb3\x9a\x4c\x25\xef\x58\x55\x05\xee\x9b\x3e\xcd\xd5\x71\x59\
-\xe7\xe6\x9f\x44\xe8\xde\x34\x37\x7c\x08\xa4\x4b\x48\x80\xd1\x23\
-\x98\xe4\x7c\x2f\x9d\x58\xfe\x15\x93\xd2\x7d\x23\x77\x24\x52\x09\
-\x28\x8b\xed\xfe\x40\x0b\xdd\x49\xd8\xc3\xe8\x2e\x8e\x68\x5f\x8c\
-\x1d\xba\x45\xf1\x61\xe9\xd3\x22\x1a\xbe\xcc\xaa\x97\x1f\x68\xf8\
-\x7f\x9e\xbb\x8b\x4d\x9a\xe9\xd3\x8b\x26\x4b\x9e\xc7\x54\x97\x04\
-\x08\x9c\x36\x7f\x40\xb5\x02\x50\x8e\xfc\x35\xec\x39\x3f\x44\x36\
-\x3e\xee\x04\x04\x53\x01\x96\xad\x01\x3f\xdb\x65\x20\x24\x20\xa5\
-\xdb\x12\x28\xea\xb8\x10\xde\x58\xcc\x65\x22\x9a\x3a\x32\xe9\x4a\
-\x2d\x18\x04\x25\x06\x19\x2e\xf7\x99\x78\xd7\xaf\x75\x84\x79\xe2\
-\xa0\xe8\x49\x23\x3d\x68\x97\x2c\xa1\xb9\x38\x28\xe6\x07\xfe\x6e\
-\x0a\x84\x99\x74\xeb\x39\x7d\x55\x05\xfc\xa5\x55\x23\xdc\x68\x63\
-\x80\x9b\xbb\xe9\x0a\xef\xf7\x67\xde\x4b\x4a\x40\x2b\x8a\x09\x6e\
-\x0a\xfe\x85\x67\x8d\xdf\x1c\x18\xbb\x27\x8a\x95\x60\xa0\x99\x6f\
-\xc1\x48\x4b\x44\x4b\xfb\xe7\x2f\x49\xf0\x1f\x69\xf8\x14\xb5\xa5\
-\x51\x1f\x69\xf8\xda\xcd\x0d\xd8\x3d\x8e\x9a\x7f\xb7\x9e\x40\xf2\
-\xea\x26\x3e\xe0\x60\x9f\x78\x20\x43\xfd\x07\x9e\x38\x52\x30\x69\
-\xb6\x63\x8d\x02\x50\xfe\xdc\x3f\x85\x1f\x7e\xad\xd5\xed\x75\x6b\
-\x18\x9e\xed\x12\x88\xb7\x59\x08\x01\x5d\x7f\x85\xbb\x9d\xa7\xc0\
-\xdd\x0e\x63\xc1\xcf\x72\x28\x3c\x6e\xd7\x1f\x0a\x2d\x06\x02\x2a\
-\x37\xac\xd8\xb4\x99\xa9\x45\x92\xcb\x88\x0f\xd7\x48\x90\xe5\x30\
-\xd8\xbe\xb1\x0a\xba\x99\x35\xc2\x33\x76\x9c\xb2\x2a\x12\x60\xf4\
-\x30\xc8\x4e\x5d\xa3\xe5\x7b\x68\xdc\x8e\x30\x2a\xc0\xe8\x0e\xfc\
-\xc7\x96\xce\xf9\x41\x24\xf4\x12\x36\x7c\x91\x00\xb3\xef\xa0\xb6\
-\x9e\x21\x7b\xcc\xbd\xee\x7f\x3d\x16\x20\xde\xbd\x7b\x66\xf9\xc1\
-\xc5\x0c\x6a\x69\x59\x60\x8b\x16\x0e\xa8\xa0\x20\x3a\x41\x3b\x94\
-\xa0\xfc\x03\x3e\x18\x1d\x5a\x0c\xf7\x18\xd4\x06\xef\x25\xad\xc9\
-\x7b\x8a\x5e\x58\x13\x87\x5e\x2d\xe2\x99\x8d\x94\x7b\x4d\x43\xae\
-\x89\xe6\xed\x45\xe1\xbf\xa7\x13\x3c\x6b\x69\x69\x47\x8f\xd5\x1b\
-\x45\x97\xa6\xd1\xdf\x5b\x43\x81\x8b\x03\xf4\xf5\xd0\xa5\x8d\x7d\
-\xe9\x62\xab\xee\x2f\xf7\xf7\xe8\x18\x7a\xc9\xb1\x45\xec\x83\x81\
-\x1a\xa9\x61\x23\x95\x22\x0e\x8c\xdc\x16\x60\x3a\xea\xfd\x03\x12\
-\x78\x99\x64\xf8\x22\x86\xba\xb3\x67\x32\xea\x7c\xf5\x60\x10\x0d\
-\x51\xfc\xec\xa2\x73\xf1\x1d\xc5\x56\xd2\xe2\xf1\x06\x1d\xb6\xe4\
-\x70\x64\x50\x53\xb3\x0d\x9a\x9a\xfe\x82\xd6\xd6\x0b\xd0\xcd\x2d\
-\x1c\x47\x8d\xba\x8a\x9c\xf2\x89\x0d\x93\x7f\xd0\x06\xd1\xe8\x34\
-\xae\x2d\x0e\x76\xb5\x2c\x3c\x13\x38\x3f\xb4\x9a\xf0\x8b\xa6\x06\
-\x8f\x0e\x00\x2f\x03\x34\x1b\xca\xc9\xab\x29\x7c\xf3\xbe\x80\x32\
-\xe5\x3b\x78\x4f\xad\x03\x8f\x3d\x2a\xc2\x48\xda\x6e\xa7\xbf\x7f\
-\x13\x65\x23\x1c\x6c\x3c\x1f\x47\x99\xac\xae\x84\x8b\xf9\xea\x57\
-\x8a\x2e\x57\x7d\x60\x46\xa0\x1f\x2c\x4a\x0c\x83\x15\x29\x39\x15\
-\x02\xb4\xe9\x2f\xea\x45\xfc\xe2\x68\xa0\xb8\xf8\x99\x2a\xee\x6d\
-\x12\x56\x54\x7e\x68\x9b\x53\x9d\x3c\x3f\x9a\x76\x41\x68\x35\x08\
-\x65\x4c\x47\xa3\x99\xb9\x0b\x5a\x5a\xba\x89\xe8\xd2\x65\x6e\x19\
-\x15\x80\x62\x6a\x64\x57\xa9\x97\xe9\x3b\x7e\x10\xa3\x64\xe5\xb9\
-\xe8\xec\x66\xc9\x22\xfc\xdd\x73\xb8\x4f\xb2\xd0\x53\x40\xc3\x4f\
-\x14\x7a\x64\x74\xba\xd6\x3d\x9e\x86\x4f\x10\x58\x0c\x02\x61\x4d\
-\x02\x68\xea\x57\x0c\xb9\x72\xeb\x50\xfd\x87\x8a\x30\x82\x96\xba\
-\x72\x5c\x05\xec\xa2\x33\xa4\x92\x00\x13\x2d\x36\xa4\xd9\x0f\x3c\
-\x14\x0c\x03\xbc\xb0\x82\xc1\x47\xf2\x60\xd0\x1e\x92\x9f\x68\x3c\
-\xc1\xe9\x6b\x04\x58\x29\x3a\xcc\x71\xd0\x7a\x84\x9d\xb9\x08\x8b\
-\x83\x11\xa6\xdd\x44\x70\xf5\x21\xff\x8d\x40\x20\x01\xb3\x70\xc7\
-\xdc\x7a\x61\xda\x79\x5e\x06\x15\xa0\x43\x87\xe9\xa2\xf0\x27\xba\
-\x5c\xcf\xf3\x73\x11\x1d\x70\x28\xa8\x5a\xd1\xf8\x0e\x1f\xc0\x2a\
-\x8d\x26\xf5\xb0\xff\x74\x13\x1c\xfc\x9b\x79\xce\x90\xb9\xe6\xd9\
-\xce\x73\xdb\xbe\x9f\xb4\xa1\x9b\xbf\x6f\xee\xc6\x50\xad\x13\x6d\
-\xb3\xc4\xe1\x23\x78\x1a\xe4\xd6\x14\xbe\xb1\x6d\xc5\x19\x84\xbd\
-\xeb\xe8\xd6\x38\x8c\x78\x53\xea\x94\xa6\x2a\xad\xd0\xd9\x78\x81\
-\x48\x80\xf1\x16\xab\xd2\xb7\x38\x1f\x2d\xe3\x0c\xd8\xff\xa6\x92\
-\x04\x66\xa3\xd8\xdd\xcb\xe5\x3e\x2b\x80\xf8\x28\xd3\x12\x70\xde\
-\x58\x7e\x9e\x5f\x55\x3c\x05\x7c\xd8\x51\x90\x0c\x1b\xdf\x06\xc3\
-\xe2\xa7\x71\x30\x2d\x92\x4f\x44\x78\xdf\xdc\x61\x47\x30\x95\xc0\
-\xcd\x35\x2c\x37\xf1\xb7\x89\x69\xb8\xb4\x1f\x5a\x6a\x89\xea\x02\
-\x17\xbe\xe7\x9e\x7a\xb4\x6e\x61\x3c\x03\xf0\x97\x37\x95\x71\x7c\
-\x2a\xef\xcf\x39\xa0\x5f\x5c\x11\x3e\x81\xf1\x34\x48\xad\x56\x80\
-\x01\x80\xf5\xca\xcf\x1c\xbc\x52\xe7\x57\xf3\x94\x0f\xef\x5e\x92\
-\xe7\xd6\xc3\xae\xba\xc3\x71\xb8\xe9\x6f\xa2\x96\x80\x75\x7f\xaf\
-\xa8\x8a\xf0\x9d\x76\x23\xc8\x8b\xd6\x20\x1c\xff\xe2\x84\x10\xf1\
-\x71\xae\x61\xd0\xdc\x12\x61\x6b\x06\x0d\x1c\xc1\xa3\x14\x19\x0f\
-\xbe\x90\xf1\x2c\x2b\x61\xbc\xf0\x23\xb3\x1f\xf3\x09\xb9\x1c\x2f\
-\x7c\xcf\xf1\x12\x66\x2a\x7a\xf0\x43\xd4\xb6\xe7\xdc\x6f\x35\xcf\
-\xef\xda\xcd\xd9\x6b\xee\xd3\xf0\x29\x57\x87\x59\x61\x63\x25\x79\
-\xb6\xe2\x41\x7b\xa5\x14\xbe\xc3\x48\x20\xf6\xb8\x58\x29\xfc\x32\
-\xab\x10\x55\x9e\x64\xf0\x2c\x1c\x77\xfd\x84\xea\x04\xd0\x35\x07\
-\x76\xda\x95\xd1\x4f\xb3\xac\xab\xbc\x0f\xa2\xd8\xb0\x81\x29\xce\
-\xef\xb5\x16\xd7\x0f\x3e\x84\x4c\xbf\x7d\x7c\x91\x00\xe4\x71\x2d\
-\x7e\x8c\xdb\x7f\xd5\x8c\x20\xf2\x6a\x29\x3e\xa9\x52\xf4\x8d\x0d\
-\x9a\x34\x47\xa7\x49\xcb\x71\xc3\xd5\xe7\x9f\x66\xb6\x86\x10\x78\
-\x28\xf4\xba\x8e\xaf\xbd\x4e\xe1\x63\xcf\x83\xf8\x6c\xe7\xce\x9c\
-\xd7\xcb\x7a\xf6\xbc\x9e\x60\xa7\x7f\x25\xdb\xb5\x4b\x6c\xde\x5c\
-\x87\xb2\x92\x45\x7d\xf1\xf8\x80\x76\xd8\x49\x5b\x8d\x3d\xfd\x72\
-\xc3\x7f\x7a\x46\x0e\x5d\x4e\x25\xa3\x0c\x38\x3c\xad\x22\xfc\xdc\
-\x66\x37\x1a\x87\x57\x17\x3e\x45\x66\x4f\xf3\xe8\xaa\xe1\x9b\x39\
-\x00\x72\x65\x45\xbf\x97\xfb\x4f\xb7\xb6\xaf\x7c\xc5\x14\xed\x98\
-\x42\xfd\x06\x2d\xb1\xa1\x95\x2b\x42\x97\xb9\x28\x7a\x9c\x03\x2c\
-\xff\xa6\x29\x61\xb4\x87\x48\x7c\x5a\x17\x6d\xd3\x1f\xa7\x77\x31\
-\xc3\x30\x68\xd2\xc5\x01\xe7\xfc\x7e\x0b\xb7\xef\x78\x87\x5e\xe4\
-\x91\x20\xc9\xc2\x85\xb7\x43\xa6\x8e\xdf\xfa\x76\xae\x31\x93\xe2\
-\xae\x0a\xde\xcf\x0c\x99\xd0\xb7\x76\x8d\x53\x82\x86\x98\x15\x65\
-\xcc\xee\x8d\xa1\x13\xba\xa2\x8b\xa9\x0e\xca\x71\x39\x42\xf1\x49\
-\x61\xa3\xbe\xe5\xa4\x2c\xba\x22\xa8\xa9\x53\x79\xf8\xc3\x32\x99\
-\x67\xaa\xa7\x75\x5f\x54\x1b\xfe\x7e\xc3\xb7\x70\xac\x65\x98\xf2\
-\x6e\xad\xbf\xab\x0a\xa0\xa1\x2b\x0a\xff\x43\x6d\x6d\xf6\x7d\x07\
-\x09\xb8\xe2\xd2\xb6\x58\xa2\xe7\xf2\xab\x16\xb8\x7e\xcd\xa6\x0c\
-\x13\xc4\x7d\xdc\xa4\x99\xc7\x25\x4d\xbf\xbe\xa4\xd2\x77\x0e\xdd\
-\xdd\xf9\x42\x4f\xcf\x52\xf2\xff\xfb\xf3\xe9\xb1\xa7\xb3\xbb\x98\
-\xf2\x7e\x33\x86\xe2\x0d\x0d\xc1\xff\xb8\x2c\x84\x67\xb5\x82\x24\
-\xb4\x04\x61\x72\x37\xf5\x22\x9f\x7e\xad\x30\x6c\x5c\x17\x5c\x67\
-\x6b\x8c\x3a\x2a\xa2\x7d\xf7\xf2\x09\x07\xe8\x96\x2f\x5f\xb8\xbe\
-\x21\xed\x82\xed\xb0\x1d\xb0\x97\x1f\xf7\x8d\xec\x61\xbd\x7c\x71\
-\xe0\xc5\x70\xc4\x30\x09\xce\xb7\x0e\x03\xef\x76\x8f\x21\xaa\x63\
-\x2e\xc4\x5b\x0b\xe0\x9c\x1a\x4f\x71\x0b\x04\x48\x86\x6f\xd4\x15\
-\xfe\x31\x0b\xe6\xa7\x5e\xed\x0b\x20\x2f\x39\xe5\xeb\xbb\x4d\x0a\
-\x25\x2f\x3b\xf1\x81\xc8\xa2\x0f\x54\x49\x49\x03\x1d\x1d\xe7\x89\
-\x0e\x3b\x16\x31\x7e\x4b\x1e\x11\xe0\x03\x01\x97\xe9\x02\xcf\x93\
-\x54\x2a\x2f\x2a\x82\x7f\x81\x19\x64\x10\x11\x90\xf2\xbc\xb3\xb2\
-\xc0\xdb\xde\x10\x77\xd9\x18\x62\xb7\xa6\x1a\x6c\x30\xe1\xe2\x93\
-\x37\x7a\x8b\x9b\xa1\x7b\xc5\xa7\x68\xe6\x92\xc6\x0f\x82\x61\x3d\
-\x34\x3a\x5c\x1f\x99\xc3\x46\x69\x70\xc7\x2c\x19\x42\x3a\x7c\x24\
-\x61\x23\x3c\x91\x20\xae\xf3\x47\x38\xa6\x10\x4c\x57\x0c\xab\xfc\
-\x01\x3c\x36\xfc\x76\xfd\x01\x15\x54\x2a\x4e\xf7\xa8\xf7\x6f\x59\
-\xf2\xfd\x43\x67\x05\x93\x57\x6b\xf1\x81\x48\x6f\x15\x15\xeb\xa3\
-\xb5\xf5\x2f\x38\x79\xf2\x3e\x91\x04\x63\x2d\x5a\xbd\xa4\x02\x50\
-\x16\x18\x42\xc4\x3e\x2e\xe4\x78\x01\x14\xde\x56\x05\x9f\x92\xb6\
-\x90\xcf\x8a\x40\x49\xeb\xa8\x88\x17\x2c\x35\x70\x54\xd3\x7a\xa8\
-\xc0\x65\x10\x28\xc6\xf5\x10\x9c\x1b\x21\xac\xd0\x47\x38\x65\x86\
-\x10\xd5\xb9\x72\xd0\xd5\x11\x63\x95\x09\x07\x39\xf1\xec\x5e\x01\
-\xea\xab\x3e\x09\xa0\xdd\xa6\xe2\xee\xef\x2f\x0d\xfb\x3b\x4f\x0b\
-\xa7\x6d\x4b\xda\xc1\x40\x38\xa4\xac\xac\x2e\xb0\xb5\x1d\x8b\xe3\
-\x46\xaf\xc3\x99\x46\x20\x60\x25\x98\xdb\x12\x52\x76\xc9\xc3\x53\
-\x22\x01\x12\xde\x05\x36\x02\x5f\x41\x7b\x28\x95\x14\x81\x12\x66\
-\x29\x83\x1c\x5f\xab\x2f\x87\x5d\x95\x50\xf3\x44\x38\xc0\xa4\x49\
-\x6e\x16\xa1\xb9\x04\x7c\x69\xf8\x26\x7d\xc8\x23\x8b\x2b\x0a\xff\
-\x92\x34\xe8\x1f\xbc\x2e\x80\xee\xa4\x4d\x78\x51\xbf\xbe\x26\xda\
-\xea\xaa\x21\x2b\x00\xe5\x57\x63\x28\xd8\xa2\x0a\x21\x62\x09\xf0\
-\x20\x03\x2f\xe2\x9a\x43\x50\x15\x09\x84\x23\x16\xeb\x46\x7c\x53\
-\xf8\xbe\x46\xa1\x24\xf0\x82\xaa\xbb\x85\xe8\xcc\x81\x10\x2a\x80\
-\x5a\x93\x8a\x31\xfa\xa6\xd2\xa0\xff\x1f\x16\x86\x88\xc7\x0f\xe8\
-\xa8\xa0\xd0\x88\x3c\x77\xa7\xb7\x00\x49\x11\x84\xbf\x37\x01\x1f\
-\x56\x02\xca\x51\x19\x88\x4d\x35\x82\x47\xac\x04\x59\xe6\x70\x53\
-\x25\xac\xd3\xe3\xaf\x0a\xff\x7a\x13\x5f\x12\x76\x59\x75\x7b\x05\
-\xe9\x8c\x86\xf7\x86\x9d\x2a\x8a\xfe\x79\xd2\x90\xff\x9f\x57\x06\
-\x89\x77\xf2\x7e\xaa\x2c\x43\x9a\x6d\xba\x95\x24\xc0\x45\xfa\x10\
-\xe8\xc1\x81\x8f\x92\x22\x9c\x51\x80\xa0\x1c\x13\x48\x79\xd7\x09\
-\x1e\xdc\xb2\x51\xe3\x91\x80\x05\x35\x06\x4f\x6b\xfa\x67\x69\x07\
-\x50\x0d\x1b\x45\x11\x9a\x0e\x06\x94\xab\x27\x0a\xff\x61\x5d\x3f\
-\xd2\xad\xce\x2e\x0d\x13\x2f\x41\xf2\xa4\x07\x2b\x5a\x69\x00\xce\
-\x69\xf9\x49\x02\x52\x2f\x48\xd8\x23\x07\x69\x92\x12\x10\xf8\x57\
-\x35\xe0\x46\x96\x2e\xf8\x5a\x9f\x32\xf3\xad\x36\xfc\x47\x1d\x3f\
-\xc0\x11\xb9\xd0\xcf\x85\x2f\x6a\x05\x94\xf7\xf8\xe5\x7c\xef\xf1\
-\x08\xa9\x00\xff\x99\x08\x74\xd3\x24\x7e\x23\x05\xc0\xf1\xfa\x95\
-\x4a\x83\x77\x7f\x2a\x41\x4c\x25\x09\x18\xc8\x88\xea\x06\x05\xef\
-\xea\xcb\xa4\x71\xe2\xac\xdf\x56\x0a\x3f\xca\x32\x9d\xd4\xf4\x13\
-\x6a\x08\xfd\x03\xec\x81\x48\x98\x49\x9e\xf7\x83\xc8\x9f\xcb\x2b\
-\x7e\xc3\xa5\xe1\xd6\x92\xc5\xa1\xe2\x39\x05\x19\x32\xa4\x4d\xdf\
-\xbb\x71\x25\x09\xf8\x6b\x35\xc1\x4f\x52\x82\xc0\x4e\x90\x5e\xa0\
-\x0a\xbe\x1b\xa6\xea\x04\x54\x84\x1f\x6c\xf6\x04\xbc\x98\xcc\x4f\
-\xfb\x01\x42\x2e\xec\x85\x50\xd8\x00\x3c\x98\x0f\x8f\xc1\x0d\x4a\
-\x09\x08\x2e\x04\x25\x51\xf8\x07\xa4\xc1\xd6\xb2\xd5\xc1\xe2\xad\
-\xd5\x68\x37\x30\x1a\x2a\x03\x4e\x93\xa8\x20\x2e\x6b\x06\xbe\xe4\
-\x59\xc1\xa7\x02\xdc\x34\x82\xe0\xf8\xf6\x90\x4c\xbe\x43\xa8\x11\
-\xdc\x31\x06\x1e\xb4\x08\x21\x81\xbf\x82\xdd\x10\x04\xeb\x48\xe0\
-\x73\x21\x81\x84\x2d\x10\x05\x2e\x89\x2b\x41\x5b\x14\xfe\x13\x69\
-\x87\x4f\x2d\x5d\x1e\x2e\x9e\xd4\xb0\x86\xce\x17\x50\x22\x15\xc4\
-\x21\x4d\x3f\x49\x30\xdf\x08\xa2\xf7\x72\xe1\xdd\x19\x75\xf0\x0d\
-\xef\x0e\xc8\x97\x85\xe8\x64\x1d\xb9\x64\x99\x39\x90\xf4\x8f\xb0\
-\xab\xa3\xfc\xb9\x5f\x44\x68\x2b\x0d\xb5\x96\xef\x0f\x20\xee\x52\
-\x7e\x45\x4b\x83\xf6\xea\x80\xb3\xc5\x15\x44\x52\x51\xcc\x70\xd7\
-\x00\x6f\x2a\x40\x6a\x0b\x08\xa4\x6f\xcd\xab\x35\xb9\xeb\xbf\x14\
-\xbe\x5d\x45\x93\x6f\xbc\x34\xd0\x3a\xb2\x41\x84\x78\x90\x69\x0a\
-\xad\x1b\x68\xca\x03\x8e\x13\x57\x10\x7f\x6d\x09\x85\xa1\xdd\x41\
-\x10\x61\x0b\x7c\x64\x20\x53\x08\xf0\xa1\xd9\x28\xc8\xa8\x31\xfc\
-\xe1\x84\xf2\x61\xde\xdd\xd2\x30\xeb\xe0\x0e\x21\xe2\xd9\xbe\x77\
-\x64\x18\xc0\x9e\xe2\x0a\xe2\xc5\x4e\x90\x49\x4b\x81\x9c\x86\xe0\
-\x43\xdf\x5e\x52\x7d\x08\xac\x36\xfc\x09\x84\xf2\x19\x3e\xb7\xfe\
-\x57\x9b\x2c\x4a\x05\xf8\x7e\x22\xd0\xd2\xa0\x4c\x9f\xd4\xe2\x97\
-\x1b\x83\x80\x0a\x10\xdb\x09\x32\xc8\xbf\x94\xd2\xb7\x38\xa2\x37\
-\x44\xfc\x43\x00\x3d\x51\xf8\x8f\x7e\xc4\x34\x74\xa9\x00\xff\x3b\
-\x09\x70\x38\xa9\xcd\x53\x01\x28\x45\x8a\x10\x44\xdf\x62\x31\x07\
-\x5e\xc8\x4d\x81\x92\x8a\xf0\x3b\x8a\xc2\x7f\xf7\x9f\xce\x32\x92\
-\x52\x4b\xf7\x08\x22\xaf\xdf\xe5\x38\xa4\x42\x68\x00\x48\xea\x02\
-\x18\x6c\x0c\xa9\xa2\xb7\x48\xc7\x0e\x0c\x21\x40\x14\x7e\xbf\x8a\
-\x99\xbd\x4e\xd2\x00\x7f\xc2\x5d\xc2\xd8\x9d\x41\xcd\xea\x03\x9e\
-\xef\x00\xf8\x9c\x53\xfe\x18\x20\x6d\x3c\xa1\x5e\x6f\x12\xbc\x02\
-\x54\x5a\xdd\x2a\xe5\x27\xdc\x26\x4e\xbc\x1a\x26\x9d\x96\x06\x6e\
-\x24\xf0\x25\xe4\x6d\xf6\xa3\x4b\xa4\x98\x8a\x4a\x9f\x82\x34\xbc\
-\x9f\x7c\x9f\x40\xf1\x80\x12\x3d\x7c\x21\x5d\xbc\xb8\x81\x72\x50\
-\x5a\xe3\xff\x17\x9f\x18\x22\x45\x2a\x80\x94\xef\xcc\xff\x01\x19\
-\x47\x8e\x78\xd3\x1b\x66\xf0\x00\x00\x00\x00\x49\x45\x4e\x44\xae\
-\x42\x60\x82\
-\x00\x00\x05\x24\
-\x89\
-\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
-\x00\x00\x18\x00\x00\x00\x18\x08\x06\x00\x00\x00\xe0\x77\x3d\xf8\
-\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\
-\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x06\xec\x00\x00\x06\xec\
-\x01\x1e\x75\x38\x35\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\
-\x74\x77\x61\x72\x65\x00\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\
-\x70\x65\x2e\x6f\x72\x67\x9b\xee\x3c\x1a\x00\x00\x00\x13\x74\x45\
-\x58\x74\x41\x75\x74\x68\x6f\x72\x00\x52\x6f\x64\x6e\x65\x79\x20\
-\x44\x61\x77\x65\x73\x0e\xd8\x7e\x1d\x00\x00\x04\x82\x49\x44\x41\
-\x54\x48\x89\x8d\x96\x6d\x88\x54\x55\x18\xc7\x7f\xe7\xbe\xcd\xdc\
-\xb9\xb3\xb3\x33\xfb\x1a\xeb\xbe\xe9\x6e\xad\x2f\x69\xea\x4a\x68\
-\x8a\x22\x24\x94\x58\x7e\xaa\x48\x23\x30\x41\xa1\xec\x5b\x51\x41\
-\xf8\x31\xa3\x3e\x44\x94\x44\x92\x09\xa5\x46\x54\x84\x21\x59\x44\
-\x18\x96\x19\xe8\xb2\x5a\x9b\xba\xea\xa4\xfb\x66\xee\xce\xce\xec\
-\xec\xcc\xdc\xbd\xf3\x76\xef\xe9\x43\xb3\x63\xe6\xbe\x3d\xf0\x7c\
-\xfb\x9f\xff\xef\x3c\xcf\x3d\xf7\x39\x47\x48\x29\x99\x2d\x36\xbf\
-\x27\x7c\x6a\x40\x7f\xc0\x6f\xaa\x8f\x02\x64\x1d\xf7\x84\x3b\x51\
-\xb8\xf0\xed\x8b\x32\x37\xdb\x5a\x31\x1d\x60\xf7\x01\xa1\x67\x2a\
-\x42\x6f\x37\xd7\x76\xac\xaf\x0a\xd6\xd5\x07\x2c\x7f\xb5\x69\x05\
-\x7c\x52\xba\x38\xb6\x93\xb3\xed\x6c\x62\xdc\x8e\xdd\xea\x8b\x5d\
-\x39\x15\x4c\xa7\x5e\xfe\x70\x97\x2c\xcc\x19\xb0\xf5\xa0\xb1\xa2\
-\xb9\x7e\xfe\xa1\x65\x1d\xab\x96\x7a\x5a\x56\xc9\xbb\x0e\x92\x3b\
-\x75\x02\x81\xa1\x9a\x50\x50\xbd\x9e\x2b\xe7\xff\xe8\x1f\xbe\xb1\
-\xe3\xd8\xce\x7c\xf7\xac\x80\x6d\x9f\x87\xf7\x2d\x69\x59\xf9\x5c\
-\x5d\x7d\x4d\x9d\xe3\xa6\x67\xeb\x00\x00\x01\x35\x44\x7c\x78\x74\
-\xe4\x42\x5f\xd7\xc7\x47\x9f\x4a\xbe\x36\x2d\xe0\x89\xc3\xe6\xb3\
-\x6b\x96\xae\xdf\x6f\x04\xf5\xa0\x27\xdd\x39\x99\x87\xb4\x7a\xaa\
-\xb4\x05\xa4\x8a\x43\x8c\x24\xaf\x65\xce\xf4\xfc\xfa\xc2\x17\xcf\
-\x38\x9f\xdc\x05\x78\xfc\x80\xa8\x59\xdc\xb6\xfc\xb7\xc6\xd6\x79\
-\x6d\x73\x33\x17\x6c\xa8\xdc\x43\x83\xb1\x0c\x9f\x08\x72\xc9\xf9\
-\x9e\x73\x99\x4f\x19\xb8\xd1\x17\xbd\x18\xed\x59\xfd\xcd\x2e\x39\
-\x0a\xa0\x4c\xca\xab\xab\xea\x8e\x34\x35\x37\xcc\xd1\x1c\x56\x5a\
-\x4f\xd2\xea\x5b\x8d\x5f\x54\x10\x2b\x46\x39\x97\x39\x8c\x2b\x5d\
-\x5a\x9a\x5b\xdb\xaa\xab\x6a\x8f\x4c\xea\x14\xf8\xf7\xa3\xde\xd7\
-\xd2\xb1\xd6\x15\x73\x33\xaf\xd1\xdb\xb9\xd7\xdc\x88\x82\x4a\xda\
-\x1d\xe1\x97\xd4\x07\xb8\xb2\x08\x40\x51\xb8\xb4\x37\xcf\x5f\xbb\
-\xf5\xa0\xb1\xa2\x0c\xf0\xfb\xd4\x2d\x56\xd0\xb4\x40\x20\xc4\x54\
-\xa9\x94\x53\x53\xfc\xac\x0b\xed\x22\xa0\x86\xc9\x4b\x87\x4b\xce\
-\x09\xd2\xee\x2d\x14\xa1\xa2\x08\x0d\x21\x54\x02\x96\xdf\xf2\xfb\
-\xd4\x2d\x00\x1a\x80\x65\x55\x76\xaa\x86\x86\xbc\xdd\xb1\x52\x97\
-\xc5\x5d\xbb\x5f\x57\xb1\x9b\x88\xd6\x02\xc0\xad\x42\x0f\x97\xb3\
-\x3f\xa2\x08\xed\x0e\x8d\x66\xe8\x58\x66\xa8\xb3\x0c\x08\x9b\x91\
-\x26\x4f\x4a\x54\x45\x9d\xd1\x7c\x81\xff\x21\x1a\x7d\xcb\xf1\x3c\
-\x8f\x91\x42\x2f\xc7\xe3\x7b\x31\xd5\x10\x86\x12\xb8\x43\x57\x14\
-\x0a\xe1\x40\xa8\xa9\x0c\x30\x0d\xb3\x5a\x11\x3a\x0a\x0a\x4c\x61\
-\x0c\x10\x50\x23\x2c\x31\x1f\x43\xb8\x3a\x49\xf7\x26\xc7\xe3\x7b\
-\x89\xe5\xa3\x00\x58\x6a\x15\x11\xa3\x11\x53\x0d\x23\x10\xe4\xbd\
-\x1c\xa6\xe1\xab\x2e\x03\x9c\xfc\x44\x5c\x57\x8c\x96\xff\x9a\x77\
-\x98\x9b\x08\x28\x61\xba\xed\x2f\xf1\x64\x91\x4e\x73\x1b\x16\xb5\
-\x14\x65\x8e\x0b\xf6\x57\x0c\xe5\xcf\x97\xb5\xb6\x9b\xc0\x76\x12\
-\xf8\x14\x8b\xb0\xde\x48\xde\x4b\xe1\xe4\xb3\xf1\x32\x20\x69\x8f\
-\x0d\x08\xe9\xad\x14\xc2\x00\x60\x51\xe0\x11\x96\x04\x36\x23\x50\
-\x18\x2b\x0c\x52\x70\x73\xdc\xa3\x2d\x06\xa0\x3f\x77\x96\x53\xa9\
-\xfd\x53\x56\x99\xf3\x6c\x86\x73\xbd\x54\xa9\xb5\x24\x27\xc6\x07\
-\xca\x00\xdb\x49\x75\x25\x9d\xa1\xad\xe8\x3a\xa6\x5a\x89\x81\x85\
-\xf0\x34\x3c\xcf\xa3\xcd\xd8\x80\x2e\x4c\x14\xa1\x13\x2b\x5c\xe5\
-\x58\xe2\x55\x60\xe6\x09\xac\xba\x3a\xb6\x93\xe9\x2a\x03\xb2\x39\
-\xf7\x78\x2e\x53\x7c\x65\x3c\xd0\x67\x49\x3c\xbc\xa2\xa4\xb5\x76\
-\x0d\x9a\xf0\x53\x55\x3a\x31\xb6\x1b\xe7\x64\xea\x1d\x26\xbc\xc4\
-\x8c\xe6\x02\x85\x82\x2d\xed\x6c\xce\x3d\x0e\xa5\xff\xe0\xd8\xce\
-\x7c\x77\x74\xa0\xff\x74\x8d\xd2\x00\xc0\x60\xbe\x9b\x44\xb1\xbf\
-\xbc\xc8\x93\x2e\x97\x9c\xef\xb8\xea\xfc\x34\xa3\x39\x40\x8d\x52\
-\x47\x74\x70\xf0\xf4\xe4\x64\x2d\x1f\xfc\x78\x22\xb6\x3d\x31\xe4\
-\x44\x4d\xc5\xc2\xc3\xe5\x77\xfb\x6b\x3c\x59\xa4\x20\x1d\x06\xf2\
-\xe7\xf8\x21\xf9\xe6\xac\xe6\xa6\x12\x20\x31\xe4\x44\xe3\x89\xf8\
-\xf6\x72\x45\xff\x9f\xa6\x9d\x0b\xef\x7f\x3f\xe9\xff\xbb\xc2\x95\
-\x1e\x0b\xcd\x87\xc9\xca\x34\xd7\xb3\x67\x98\xad\xef\x9a\xd0\x08\
-\x3b\xb5\xe9\xae\xde\xde\x3d\x53\x4e\xd3\xc9\xd8\xf6\x59\x78\x5f\
-\xc7\xbc\x96\x1d\x6a\x4d\xa1\x3e\xe9\xc6\x67\xdd\x35\x40\x58\x8d\
-\xe0\x8e\x6a\xc3\xbd\x43\xfd\x87\x8e\x3e\x9d\x9a\xfe\x3e\x98\x8c\
-\x4d\x6f\x19\xab\xdb\xda\xeb\x3f\x5a\xd0\x5e\xb7\x28\xad\x24\x94\
-\xac\x9c\xfa\x46\xf3\x0b\x3f\x15\x6e\xd8\xfb\xeb\xda\xc8\xe5\x8b\
-\x5d\xb1\xe7\x7f\xde\x57\x3c\x2d\x65\x69\xea\x4d\x05\x10\x42\xa8\
-\x40\x35\x10\x36\x2b\xa8\xdd\xf8\x7a\xf0\xa5\x96\xe6\xea\x65\xa1\
-\x50\x20\x12\x08\x2b\x41\xa3\x42\xd1\x01\x72\x29\xaf\x38\x91\x2c\
-\x66\xd2\xa9\x6c\xf2\xfa\xf5\xd8\x9f\x27\xdf\x98\x78\x37\x67\x33\
-\x0a\x8c\x01\x49\x29\x65\x72\xda\x0a\x84\x10\x16\x10\x01\x2a\x4b\
-\x59\x61\x04\x89\x34\x3c\xa8\x2c\x6c\x5a\xa5\xad\x90\x12\x65\xe0\
-\x6c\xf1\xfc\xcd\xb3\x5e\xb4\x60\x33\x06\x64\x80\x14\x30\x5e\xca\
-\x84\x94\xb7\x1f\x00\xd3\xbe\x2a\x4a\x30\x1d\x30\x00\x5f\x29\x75\
-\x40\x05\x8a\x40\x0e\xc8\x03\x59\xa0\x20\xe5\xd4\x37\xd5\x3f\x13\
-\x05\x02\x8c\xec\xcf\x7e\xae\x00\x00\x00\x00\x49\x45\x4e\x44\xae\
-\x42\x60\x82\
-\x00\x00\x05\x64\
-\x89\
-\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
-\x00\x00\x18\x00\x00\x00\x18\x08\x06\x00\x00\x00\xe0\x77\x3d\xf8\
-\x00\x00\x00\x04\x73\x42\x49\x54\x08\x08\x08\x08\x7c\x08\x64\x88\
-\x00\x00\x00\x09\x70\x48\x59\x73\x00\x00\x0d\xd7\x00\x00\x0d\xd7\
-\x01\x42\x28\x9b\x78\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\
-\x74\x77\x61\x72\x65\x00\x77\x77\x77\x2e\x69\x6e\x6b\x73\x63\x61\
-\x70\x65\x2e\x6f\x72\x67\x9b\xee\x3c\x1a\x00\x00\x04\xe1\x49\x44\
-\x41\x54\x48\x89\xb5\x95\x4b\x6c\x54\x55\x18\xc7\x7f\xe7\x9c\x3b\
-\xd3\x99\x32\xb5\xf4\x41\x1f\x38\x02\xa5\x8d\x4c\x54\xb0\x25\x48\
-\xa2\x12\x62\x14\x17\x46\x12\x64\x81\x26\xb0\x31\x18\x12\x36\x04\
-\xd2\x45\xe9\xca\x84\x5d\x29\x1a\x43\x80\x6e\x0c\x2b\xdc\x88\x09\
-\xa0\x21\x04\x35\x18\xa2\x26\x3e\xa3\x62\xa8\x4f\x7c\x61\x95\x81\
-\xb6\x4c\xcb\xbc\x67\xee\x39\x9f\x8b\xdb\xde\x4e\x2b\x10\x37\xde\
-\xe4\x4b\xee\xf3\xf7\xff\xfe\xdf\xf9\xee\x77\x94\x88\xf0\x7f\x1e\
-\xde\xdd\x1e\x9e\x53\xaa\x23\x6a\xcc\x33\xf1\x58\xec\xc9\xc6\xae\
-\x15\x2b\xeb\x97\x76\x2e\xb5\xc5\x62\x29\xfb\xd7\x5f\x63\xb9\x6b\
-\xd7\x7f\x2c\x55\x2a\xe7\x2b\xd6\x7e\xb0\x59\xa4\x70\x27\x86\xba\
-\xad\x03\xa5\xd4\x87\xb1\xd8\xcb\xc9\xc7\x1e\xdd\xdd\xd9\xde\xd6\
-\x11\xab\x8b\xa3\x4a\x45\xc8\xe5\xc1\x18\x48\x2c\xc2\x79\x86\x5c\
-\x21\xef\x7e\x1f\xbd\x7c\x65\xe2\xa7\x5f\xf7\x3c\x59\xad\xbe\xf7\
-\x9f\x04\xce\x2b\xb5\xa2\x75\x69\xe7\x1b\xa9\x0d\x8f\xaf\x4f\x54\
-\x6d\x84\x52\x29\x7c\x26\xc0\xec\xfb\x32\x13\x34\x24\xb8\x91\x9f\
-\x9e\xfa\xe5\xe3\x4f\xdf\xb6\xb7\xb2\xbb\x9f\x10\x29\xd5\xf2\xe6\
-\x09\xbc\xa7\x54\x6a\xc5\xda\xde\xf7\x7b\xba\xbb\x93\x3a\x57\x98\
-\x83\x2c\x80\x2e\x14\x11\xcf\xc3\xc6\x23\xf2\xdd\xa7\x9f\x7d\x91\
-\x1f\xbb\xf6\xf8\x13\x22\xfe\x2c\x53\xcf\x9e\x1c\x50\x4a\xb7\x2e\
-\x5f\x76\xa2\x67\xf9\xb2\x24\xd9\x3c\x4e\x24\x0c\x0b\xd8\x9a\x6b\
-\x07\xc1\x3d\xc0\x01\xce\xf7\x21\x5b\x54\x3d\x0f\xaf\x5e\xa7\xeb\
-\xeb\x5f\xab\x75\x10\x0a\x6c\x8a\xc7\x87\xee\x7f\xe8\x81\x3e\x29\
-\x56\x02\x80\x08\x95\xed\xdb\xa9\x6e\xd9\x32\x27\x52\x03\xb5\x9e\
-\x87\x1d\x18\xc0\xef\xed\x0d\xc5\x8d\x55\x3a\xd9\xf7\xe0\x0b\xef\
-\x46\xa3\xeb\x67\xb9\x1e\xc0\x05\xa5\xee\x5f\xf9\xd8\xfa\x17\xeb\
-\x7c\x67\x1c\x0a\x01\xec\x8e\x1d\xf8\xcf\x3d\x17\x64\xa1\x14\xfa\
-\xf4\xe9\xb0\x3c\x2e\x12\x81\xfd\xfb\xa1\xb7\x17\xfa\xfa\x70\x43\
-\x43\xf0\xf5\xd7\x88\x15\x9a\x1b\x9b\x96\x34\x25\xdb\x5f\x47\xa9\
-\x5e\x44\x44\x03\x78\x9e\xf7\x52\xc7\xe2\xa6\x25\x0e\x15\x64\xe9\
-\x79\xd8\x9e\x9e\xd0\xa6\xdb\xbe\x1d\x7f\xeb\xd6\x20\xd3\x68\x14\
-\x06\x07\x51\x7d\x7d\x28\xa5\x50\x91\x08\x74\x77\x63\x95\xc2\x29\
-\x85\xad\x38\x5a\xdb\x5a\x97\x9d\x85\x9e\xd0\xc1\xa2\xd6\x96\x07\
-\x11\x70\x5a\x07\x59\x3a\x07\x43\x43\xa8\xc1\x41\x58\xbd\x3a\x50\
-\xd9\xb1\x03\x17\x89\xa0\x52\x29\xd4\x9a\x35\xa1\xb8\x3d\x79\x12\
-\xff\xcc\x19\xc4\x18\x10\x41\xb4\xa6\x2e\x16\x5b\x1c\xd1\xfa\x29\
-\xe0\x67\x0d\x10\x6b\x6d\xba\xcf\x39\xc1\x69\x3d\x17\xd6\xe2\x86\
-\x87\x91\xcb\x97\xe7\x16\xec\xf9\xe7\x43\xb8\x88\xe0\xbf\xf9\x26\
-\xd5\x53\xa7\x82\xc4\xb4\xc6\x79\x1e\x62\x0c\x26\x12\xa5\x2e\x11\
-\xdf\x00\xa0\xdf\x52\x2a\x51\xb7\x28\xde\xee\xbc\xc8\x7c\x01\xad\
-\x71\xce\xe1\xbf\xf2\x0a\x32\x3a\x3a\x57\x2e\xe7\xf0\x7d\x9f\xe2\
-\xa1\x43\x64\x07\x06\x28\x5f\xb9\x82\x2d\x95\x70\xc6\x04\xdf\x18\
-\x83\xad\x5a\xe2\x4b\x9a\x97\x87\x25\xc2\x98\x60\xe1\x6a\x7b\x7b\
-\x06\x28\xc6\x20\x4a\x81\x13\x9c\xb8\xf0\x1f\x70\x80\xb5\x16\x3f\
-\x9d\x46\xd2\x69\x74\x4b\x0b\x5e\x32\x89\x6e\x6e\xc6\x96\xcb\x88\
-\xb8\xa0\x8b\xb6\x89\xe4\xbe\x79\x68\xd5\x75\xb9\xd7\xb4\x8b\x95\
-\x00\xae\x54\x00\x8f\x44\x88\xec\xdb\x07\xa9\x14\xd6\xd9\xda\xf6\
-\x26\xd6\xdf\x8f\x03\x0a\x87\x0f\x07\x5d\x37\x31\x41\x65\x7c\x1c\
-\xe2\x71\xbc\xce\x26\xf2\x13\x53\x7f\x84\xff\x41\x71\x32\xf3\xa7\
-\x3f\x39\x81\xd3\x26\xb4\xea\x97\xca\xe8\x5d\xbb\x90\x55\xab\x70\
-\x2e\xc8\xa6\xf0\xea\xab\x54\x3f\xfa\x28\x14\xa9\xef\xef\x27\xb6\
-\x77\x2f\x56\x24\x08\xc0\x2f\x14\xa8\x7a\x9a\x72\xae\xf8\x71\x58\
-\xa2\xc2\x44\x66\xb4\x9c\xcd\x3c\xeb\x46\x7f\x40\x25\x12\x38\xdf\
-\x27\x3e\x32\x82\x5e\xbb\x36\x84\xe5\x87\x87\x29\x1c\x3b\x86\x44\
-\xa3\x34\x1e\x3f\x4e\xdd\xc6\x8d\x00\x24\xfa\xfb\x71\xbe\x4f\xf6\
-\xc8\x91\xb0\xb4\x3e\x4c\x39\xe7\x2e\x84\x0e\xf0\xfd\xe3\x19\xe7\
-\xc6\x2d\x8e\xca\xf8\x38\xd5\x4c\x06\xff\xd2\xa5\x10\x9e\x3b\x78\
-\x90\xdc\xd1\xa3\x41\x96\xe5\x32\x93\x3b\x77\x52\xba\x78\x31\x28\
-\x63\xb9\x4c\xe9\xdb\x6f\xf1\x67\x5c\xe8\xf6\x26\xa6\x27\x32\x57\
-\x37\xc3\x15\xa8\x19\x76\x17\xea\xeb\x87\xbb\xfa\x52\xfd\xf6\xf2\
-\x2f\x46\x24\x58\x8b\xc4\xe0\x20\xf6\xd6\x2d\x72\x23\x23\x41\x8f\
-\xd7\x36\x41\x34\x4a\xeb\xc8\x08\xd9\x13\x27\x28\x5c\xbc\x18\x0c\
-\xc4\xa8\x87\xac\xec\x18\xff\xfb\xab\x9f\x36\x6f\x16\xf9\x7c\x9e\
-\xc0\x01\xa5\xf4\xa6\x64\xfb\x67\x1d\x0d\x0d\xeb\x2a\x63\xd7\x91\
-\x05\xc0\xd9\xde\x9f\x27\x52\x73\x0d\x50\x97\xba\xcf\xa5\xbf\xff\
-\x7d\xe4\xe9\x42\x69\xcf\xac\xfb\x79\xe3\xfa\xac\x52\xa9\xb6\x54\
-\xd7\xfb\x8d\xc6\x24\x2b\x7f\xa4\xff\x05\x58\x08\x0d\x47\xb7\x67\
-\x88\x76\x77\xca\x74\xfa\xe6\x17\x92\x9e\x9c\x37\xae\x6f\xbb\xe1\
-\xc4\xda\x9a\xdf\x58\xb2\x62\xe9\x23\xfe\x6f\xd7\xa2\x36\x5f\x9c\
-\x2f\xb0\x40\xc4\x6b\x6b\x44\x9a\x13\x99\xc9\x1f\xc7\xde\x89\x15\
-\x4a\x77\xdf\x70\x00\x94\x52\x0d\x31\x68\x39\x12\x31\x03\xa9\x55\
-\xcb\xb7\x35\x18\xd3\xaa\x7c\x8b\x9d\xce\x53\x9d\xce\x83\xd1\x98\
-\xc5\x09\x74\x43\x0c\xab\x95\x4c\x66\x72\x63\x17\xae\x5e\x3f\x78\
-\x08\xde\x05\xa6\x80\x29\xb9\x93\x03\xa5\x54\x04\x68\x01\x16\x03\
-\x4d\xbd\xd0\xb5\x49\xeb\x8d\x5d\x9e\xe9\x6b\x69\x69\x68\x6b\x6c\
-\x5c\xd4\x58\xf5\xad\xbd\x99\xc9\x4d\xdf\x98\xce\x5f\x1b\xf5\xed\
-\x97\xe7\xe0\x93\x71\x48\xcf\xc2\x81\x8c\x88\x64\xef\xe8\x60\x46\
-\xa8\x1e\xb8\x67\x26\xea\x81\x38\x50\x07\x18\x40\x11\xec\x3b\x15\
-\xa0\x0c\xe4\x81\x1c\x70\x0b\xc8\xca\xec\x8c\x98\x39\xfe\x01\x76\
-\x95\xba\xf1\x06\x3a\xff\x81\x00\x00\x00\x00\x49\x45\x4e\x44\xae\
-\x42\x60\x82\
-"
-
-qt_resource_name = "\
-\x00\x06\
-\x07\x03\x7d\xc3\
-\x00\x69\
-\x00\x6d\x00\x61\x00\x67\x00\x65\x00\x73\
-\x00\x0e\
-\x05\xcd\xf4\xe7\
-\x00\x63\
-\x00\x6f\x00\x6e\x00\x6e\x00\x5f\x00\x65\x00\x72\x00\x72\x00\x6f\x00\x72\x00\x2e\x00\x70\x00\x6e\x00\x67\
-\x00\x13\
-\x09\xd2\x6c\x67\
-\x00\x45\
-\x00\x6d\x00\x62\x00\x6c\x00\x65\x00\x6d\x00\x2d\x00\x71\x00\x75\x00\x65\x00\x73\x00\x74\x00\x69\x00\x6f\x00\x6e\x00\x2e\x00\x70\
-\x00\x6e\x00\x67\
-\x00\x12\
-\x04\xe4\x91\x47\
-\x00\x63\
-\x00\x6f\x00\x6e\x00\x6e\x00\x5f\x00\x63\x00\x6f\x00\x6e\x00\x6e\x00\x65\x00\x63\x00\x74\x00\x65\x00\x64\x00\x2e\x00\x70\x00\x6e\
-\x00\x67\
-\x00\x13\
-\x0d\x76\x37\xc7\
-\x00\x63\
-\x00\x6f\x00\x6e\x00\x6e\x00\x5f\x00\x63\x00\x6f\x00\x6e\x00\x6e\x00\x65\x00\x63\x00\x74\x00\x69\x00\x6e\x00\x67\x00\x2e\x00\x70\
-\x00\x6e\x00\x67\
-\x00\x14\
-\x00\xe9\x23\x87\
-\x00\x6c\
-\x00\x65\x00\x61\x00\x70\x00\x2d\x00\x63\x00\x6f\x00\x6c\x00\x6f\x00\x72\x00\x2d\x00\x73\x00\x6d\x00\x61\x00\x6c\x00\x6c\x00\x2e\
-\x00\x70\x00\x6e\x00\x67\
-\x00\x11\
-\x06\x1a\x44\xa7\
-\x00\x44\
-\x00\x69\x00\x61\x00\x6c\x00\x6f\x00\x67\x00\x2d\x00\x61\x00\x63\x00\x63\x00\x65\x00\x70\x00\x74\x00\x2e\x00\x70\x00\x6e\x00\x67\
-\
-\x00\x10\
-\x0f\xc3\x90\x67\
-\x00\x44\
-\x00\x69\x00\x61\x00\x6c\x00\x6f\x00\x67\x00\x2d\x00\x65\x00\x72\x00\x72\x00\x6f\x00\x72\x00\x2e\x00\x70\x00\x6e\x00\x67\
-"
-
-qt_resource_struct = "\
-\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
-\x00\x00\x00\x00\x00\x02\x00\x00\x00\x07\x00\x00\x00\x02\
-\x00\x00\x00\xb6\x00\x00\x00\x00\x00\x01\x00\x00\x0f\x03\
-\x00\x00\x00\x60\x00\x00\x00\x00\x00\x01\x00\x00\x0a\x89\
-\x00\x00\x00\x12\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
-\x00\x00\x00\xe4\x00\x00\x00\x00\x00\x01\x00\x00\x36\x7b\
-\x00\x00\x00\x34\x00\x00\x00\x00\x00\x01\x00\x00\x05\x99\
-\x00\x00\x00\x8a\x00\x00\x00\x00\x00\x01\x00\x00\x0c\x37\
-\x00\x00\x01\x0c\x00\x00\x00\x00\x00\x01\x00\x00\x3b\xa3\
-"
-
-def qInitResources():
- QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
-
-def qCleanupResources():
- QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
-
-qInitResources()
diff --git a/src/leap/gui/progress.py b/src/leap/gui/progress.py
deleted file mode 100644
index ca4f6cc3..00000000
--- a/src/leap/gui/progress.py
+++ /dev/null
@@ -1,488 +0,0 @@
-"""
-classes used in progress pages
-from first run wizard
-"""
-try:
- from collections import OrderedDict
-except ImportError: # pragma: no cover
- # We must be in 2.6
- from leap.util.dicts import OrderedDict
-
-import logging
-
-from PyQt4 import QtCore
-from PyQt4 import QtGui
-
-from leap.gui.threads import FunThread
-
-from leap.gui import mainwindow_rc
-
-ICON_CHECKMARK = ":/images/Dialog-accept.png"
-ICON_FAILED = ":/images/Dialog-error.png"
-ICON_WAITING = ":/images/Emblem-question.png"
-
-logger = logging.getLogger(__name__)
-
-
-class ImgWidget(QtGui.QWidget):
-
- # XXX move to widgets
-
- def __init__(self, parent=None, img=None):
- super(ImgWidget, self).__init__(parent)
- self.pic = QtGui.QPixmap(img)
-
- def paintEvent(self, event):
- painter = QtGui.QPainter(self)
- painter.drawPixmap(0, 0, self.pic)
-
-
-class ProgressStep(object):
- """
- Data model for the sequential steps
- shown in a progress page of the
- connection wizard.
- """
- NAME = 0
- DONE = 1
-
- def __init__(self, stepname, done, index=None):
- """
- @param stepname: the name of the step
- @type stepname: str
- @param done: whether the step is completed or not
- @type done: bool
- """
- self.index = int(index) if index else 0
- self.name = unicode(stepname)
- self.done = bool(done)
-
- @classmethod
- def columns(cls):
- return ('name', 'done')
-
-
-class ProgressStepContainer(object):
- """
- A container for ProgressStep objects.
- Data is accessed through the internal steps dict.
- """
-
- def __init__(self):
- self.dirty = False
- self.steps = {}
-
- def step(self, identity):
- return self.steps.get(identity, None)
-
- def addStep(self, step):
- self.steps[step.index] = step
-
- def removeStep(self, step):
- if step and self.steps.get(step.index, None):
- del self.steps[step.index]
- del step
- self.dirty = True
-
- def removeAllSteps(self):
- for item in iter(self):
- self.removeStep(item)
-
- @property
- def columns(self):
- return ProgressStep.columns()
-
- def __len__(self):
- return len(self.steps)
-
- def __iter__(self):
- for step in self.steps.values():
- yield step
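A minimal usage sketch of the two classes above (illustrative only; it assumes this module, leap.gui.progress, is importable):

    from leap.gui.progress import ProgressStep, ProgressStepContainer

    steps = ProgressStepContainer()
    steps.addStep(ProgressStep("Checking provider", False, index=0))
    steps.addStep(ProgressStep("Fetching config", False, index=1))
    assert len(steps) == 2
    assert steps.step(1).name == u"Fetching config"
    steps.removeStep(steps.step(1))   # also marks the container as dirty
    assert len(steps) == 1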
-
-
-class StepsTableWidget(QtGui.QTableWidget):
- """
- A QTableWidget configured for our display purposes:
- headers, grid, edit triggers and selection
- are removed or disabled.
- """
-
- def __init__(self, parent=None):
- super(StepsTableWidget, self).__init__(parent=parent)
-
- # remove headers and all edit/select behavior
- self.horizontalHeader().hide()
- self.verticalHeader().hide()
- self.setEditTriggers(
- QtGui.QAbstractItemView.NoEditTriggers)
- self.setSelectionMode(
- QtGui.QAbstractItemView.NoSelection)
- width = self.width()
-
- # Note: at init time the width here is ~100,
- # but when populating it is ~456 :(
- #logger.debug('init table. width=%s' % width)
-
- # XXX do we need this initial?
- self.horizontalHeader().resizeSection(0, width * 0.7)
-
- # this disables the table grid.
- # we should add alignment to the ImgWidget (it's top-left now)
- self.setShowGrid(False)
- self.setFocusPolicy(QtCore.Qt.NoFocus)
- #self.setStyleSheet("QTableView{outline: 0;}")
-
- # XXX change image for done to rc
-
- # Note about the "done" status painting:
- #
- # XXX currently we set the CellWidget
- # for the whole table on a per-row basis
- # (in the add_status_line method on ValidationPage).
- # However, a more generic solution would be
- # to implement a custom Delegate that overrides
- # the paint method (so it paints a checked tickmark if
- # done is True and something else while checking or on failure).
- # What we have now is quick and works, because
- # I'm assuming that on the first failure we will
- # go back to the previous wizard page to signal it.
- # The more generic solution could be useful for
- # failing checks that are not critical.
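A rough sketch of the delegate alternative mentioned in the note above (hypothetical and untested; it assumes the step's done flag is stored under Qt.UserRole on the item, and that sip is set to API v2 so index.data() returns a plain Python value rather than a QVariant):

    class StepDoneDelegate(QtGui.QStyledItemDelegate):
        """Paints a checkmark when the step is done, a question mark otherwise."""

        def paint(self, painter, option, index):
            done = index.data(QtCore.Qt.UserRole)
            img = ICON_CHECKMARK if done else ICON_WAITING
            painter.drawPixmap(option.rect.topLeft(), QtGui.QPixmap(img))

    # hypothetical wiring:
    # table.setItemDelegateForColumn(ProgressStep.DONE, StepDoneDelegate(table))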
-
-
-class WithStepsMixIn(object):
- """
- This class is a mixin that can be inherited
- by InlineValidation pages (which display
- a progress steps widget on the same page as the form)
- or by Validation pages (which display only
- the progress steps, below a progress bar widget).
- """
- STEPS_TIMER_MS = 100
-
- #
- # methods related to worker threads
- # launched for individual checks
- #
-
- def setupStepsProcessingQueue(self):
- """
- Should be called from the __init__ method
- of the derived classes.
- """
- self.steps_queue = Queue.Queue()
- self.stepscheck_timer = QtCore.QTimer()
- self.stepscheck_timer.timeout.connect(self.processStepsQueue)
- self.stepscheck_timer.start(self.STEPS_TIMER_MS)
- # we need to keep a reference to child threads
- self.threads = []
-
- def do_checks(self):
- """
- Main entry point for checks.
- It calls _do_checks on the derived class,
- which is expected to be a generator
- yielding tuples of the form (("message", progress_int), checkfunction).
- """
-
- # yo dawg, I heard you like checks
- # so I put a __do_checks in your do_checks
- # for calling others' _do_checks
-
- def __do_checks(fun=None, queue=None):
-
- for checkcase in fun(): # pragma: no cover
- checkmsg, checkfun = checkcase
-
- queue.put(checkmsg)
- if checkfun() is False:
- queue.put("failed")
- break
-
- t = FunThread(fun=partial(
- __do_checks,
- fun=self._do_checks,
- queue=self.steps_queue))
- if hasattr(self, 'on_checks_validation_ready'):
- t.finished.connect(self.on_checks_validation_ready)
- t.begin()
- self.threads.append(t)
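For reference, an illustrative _do_checks generator for a hypothetical derived page; the tuple shape and the head/end sentinels match what do_checks and onStepStatusChanged expect, while the check body is a placeholder:

    def _do_checks(self):
        # every yielded item is (("message", progress_int), checkfunction);
        # a check function returning False aborts the remaining checks
        yield (("head_sentinel", 0), lambda: True)

        def check_provider():
            # placeholder for a real check (e.g. name resolution)
            return True

        yield (("Checking provider", 50), check_provider)
        yield (("end_sentinel", 100), lambda: True)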
-
- def processStepsQueue(self):
- """
- Consume the steps queue
- and pass its messages
- to the UI updater functions.
- """
- while self.steps_queue.qsize():
- try:
- status = self.steps_queue.get(0)
- if status == "failed":
- self.set_failed_icon()
- else:
- self.onStepStatusChanged(*status)
- except Queue.Empty: # pragma: no cover
- pass
-
- def fail(self, err=None):
- """
- Return the failed state
- and, as a side effect, send an error notification.
- This function is called from the check functions
- yielded by the _do_checks generator.
- """
- wizard = self.wizard()
- senderr = lambda err: wizard.set_validation_error(
- self.current_page, err)
- self.set_undone()
- if err:
- senderr(err)
- return False
-
- @QtCore.pyqtSlot()
- def launch_checks(self):
- self.do_checks()
-
- # (gui) presentation stuff begins #####################
-
- # slot
- #@QtCore.pyqtSlot(str, int)
- def onStepStatusChanged(self, status, progress=None):
- status = unicode(status)
- if status not in ("head_sentinel", "end_sentinel"):
- self.add_status_line(status)
- if status in ("end_sentinel"):
- #self.checks_finished = True
- self.set_checked_icon()
- if progress and hasattr(self, 'progress'):
- self.progress.setValue(progress)
- self.progress.update()
-
- def setupSteps(self):
- self.steps = ProgressStepContainer()
- # steps table widget
- if isinstance(self, QtCore.QObject):
- parent = self
- else:
- parent = None
- self.stepsTableWidget = StepsTableWidget(parent=parent)
- zeros = (0, 0, 0, 0)
- self.stepsTableWidget.setContentsMargins(*zeros)
- self.errors = OrderedDict()
-
- def set_error(self, name, error):
- self.errors[name] = error
-
- def pop_first_error(self):
- errkey, errval = list(reversed(self.errors.items())).pop()
- del self.errors[errkey]
- return errkey, errval
-
- def clean_errors(self):
- self.errors = OrderedDict()
-
- def clean_wizard_errors(self, pagename=None):
- if pagename is None: # pragma: no cover
- pagename = getattr(self, 'prev_page', None)
- if pagename is None: # pragma: no cover
- return
- #logger.debug('cleaning wizard errors for %s' % pagename)
- self.wizard().set_validation_error(pagename, None)
-
- def populateStepsTable(self):
- # taken from examples;
- # re-populating the whole table
- # is probably not needed.
- table = self.stepsTableWidget
- table.setRowCount(len(self.steps))
- columns = self.steps.columns
- table.setColumnCount(len(columns))
-
- for row, step in enumerate(self.steps):
- item = QtGui.QTableWidgetItem(step.name)
- item.setData(QtCore.Qt.UserRole,
- long(id(step)))
- table.setItem(row, columns.index('name'), item)
- table.setItem(row, columns.index('done'),
- QtGui.QTableWidgetItem(step.done))
- self.resizeTable()
- self.update()
-
- def clearTable(self):
- # clear() would also remove the header items;
- # clearContents() only clears the cell contents.
- #self.stepsTableWidget.clear()
- self.stepsTableWidget.clearContents()
-
- def resizeTable(self):
- # resize first column to ~80%
- table = self.stepsTableWidget
- FIRST_COLUMN_PERCENT = 0.70
- width = table.width()
- #logger.debug('populate table. width=%s' % width)
- table.horizontalHeader().resizeSection(0, width * FIRST_COLUMN_PERCENT)
-
- def set_item_icon(self, img=ICON_CHECKMARK, current=True):
- """
- Set the given icon on the current step row
- (or on the previous one if current is False).
- """
- # setting cell widget.
- # see note on StepsTableWidget about plans to
- # change this for a better solution.
- if not hasattr(self, 'steps'):
- return
- index = len(self.steps)
- table = self.stepsTableWidget
- _index = index - 1 if current else index - 2
- table.setCellWidget(
- _index,
- ProgressStep.DONE,
- ImgWidget(img=img))
- table.update()
-
- def set_failed_icon(self):
- self.set_item_icon(img=ICON_FAILED, current=True)
-
- def set_checking_icon(self):
- self.set_item_icon(img=ICON_WAITING, current=True)
-
- def set_checked_icon(self, current=True):
- self.set_item_icon(current=current)
-
- def add_status_line(self, message):
- """
- Adds a new status line
- and marks the next-to-last item
- as done.
- """
- index = len(self.steps)
- step = ProgressStep(message, False, index=index)
- self.steps.addStep(step)
- self.populateStepsTable()
- self.set_checking_icon()
- self.set_checked_icon(current=False)
-
- # Sets/unsets done flag
- # for isComplete checks
-
- def set_done(self):
- self.done = True
- self.completeChanged.emit()
-
- def set_undone(self):
- self.done = False
- self.completeChanged.emit()
-
- def is_done(self):
- return self.done
-
- # convenience for going back and forth
- # in the wizard pages.
-
- def go_back(self):
- self.wizard().back()
-
- def go_next(self):
- self.wizard().next()
-
-
-"""
-We use one base class for the intermediate pages
-and another one for the in-page validations, both sharing the creation
-of the table widgets.
-The split dates from when I was trying to solve
-the UI update with signals; now that it works well with
-queues, the two classes could be merged again.
-"""
-
-import Queue
-from functools import partial
-
-
-class InlineValidationPage(QtGui.QWizardPage, WithStepsMixIn):
-
- def __init__(self, parent=None):
- super(InlineValidationPage, self).__init__(parent)
- self.setupStepsProcessingQueue()
- self.done = False
-
- # slot
-
- @QtCore.pyqtSlot()
- def showStepsFrame(self):
- self.valFrame.show()
- self.update()
-
- # progress frame
-
- def setupValidationFrame(self):
- qframe = QtGui.QFrame
- valFrame = qframe()
- valFrame.setFrameStyle(qframe.NoFrame)
- valframeLayout = QtGui.QVBoxLayout()
- zeros = (0, 0, 0, 0)
- valframeLayout.setContentsMargins(*zeros)
-
- valframeLayout.addWidget(self.stepsTableWidget)
- valFrame.setLayout(valframeLayout)
- self.valFrame = valFrame
-
-
-class ValidationPage(QtGui.QWizardPage, WithStepsMixIn):
- """
- Class to be used as an intermediate step
- between two pages in a wizard.
- It shows feedback to the user, goes back on errors
- and forward on success.
- initializePage triggers a one-shot timer
- that calls do_checks.
- Derived classes should implement
- _do_checks and
- _do_validation.
- """
-
- # signals
- stepChanged = QtCore.pyqtSignal([str, int])
-
- def __init__(self, parent=None):
- super(ValidationPage, self).__init__(parent)
- self.setupSteps()
- #self.connect_step_status()
-
- layout = QtGui.QVBoxLayout()
- self.progress = QtGui.QProgressBar(self)
- layout.addWidget(self.progress)
- layout.addWidget(self.stepsTableWidget)
-
- self.setLayout(layout)
- self.layout = layout
-
- self.timer = QtCore.QTimer()
- self.done = False
-
- self.setupStepsProcessingQueue()
-
- def isComplete(self):
- return self.is_done()
-
- ########################
-
- def show_progress(self):
- self.progress.show()
- self.stepsTableWidget.show()
-
- def hide_progress(self):
- self.progress.hide()
- self.stepsTableWidget.hide()
-
- # pagewizard methods.
- # if overridden, child classes should call super.
-
- def initializePage(self):
- self.clean_errors()
- self.clean_wizard_errors()
- self.steps.removeAllSteps()
- self.clearTable()
- self.resizeTable()
- self.timer.singleShot(0, self.do_checks)
diff --git a/src/leap/gui/styles.py b/src/leap/gui/styles.py
deleted file mode 100644
index b482922e..00000000
--- a/src/leap/gui/styles.py
+++ /dev/null
@@ -1,16 +0,0 @@
-GreenLineEdit = "QLabel {color: green; font-weight: bold}"
-ErrorLabelStyleSheet = """QLabel { color: red; font-weight: bold }"""
-ErrorLineEdit = """QLineEdit { border: 1px solid red; }"""
-
-
-# XXX this is bad.
-# and you should feel bad for it.
-# The original style has a sort of box color
-# white/beige left-top/right-bottom or something like
-# that.
-
-RegularLineEdit = """
-QLineEdit {
- border: 1px solid black;
-}
-"""
diff --git a/src/leap/gui/tests/__init__.py b/src/leap/gui/tests/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/src/leap/gui/tests/__init__.py
+++ /dev/null
diff --git a/src/leap/gui/tests/integration/fake_user_signup.py b/src/leap/gui/tests/integration/fake_user_signup.py
deleted file mode 100644
index 78873749..00000000
--- a/src/leap/gui/tests/integration/fake_user_signup.py
+++ /dev/null
@@ -1,84 +0,0 @@
-"""
-Simple server to test registration and
-authentication.
-
-To test:
-
-curl -d login=python_test_user -d password_salt=54321 \
- -d password_verifier=12341234 \
- http://localhost:8000/users.json
-
-"""
-from BaseHTTPServer import HTTPServer
-from BaseHTTPServer import BaseHTTPRequestHandler
-import cgi
-import json
-import urlparse
-
-HOST = "localhost"
-PORT = 8000
-
-LOGIN_ERROR = """{"errors":{"login":["has already been taken"]}}"""
-
-from leap.base.tests.test_providers import EXPECTED_DEFAULT_CONFIG
-
-
-class request_handler(BaseHTTPRequestHandler):
- responses = {
- '/': ['ok\n'],
- '/users.json': ['ok\n'],
- '/timeout': ['ok\n'],
- '/provider.json': ['%s\n' % json.dumps(EXPECTED_DEFAULT_CONFIG)]
- }
-
- def do_GET(self):
- path = urlparse.urlparse(self.path)
- message = '\n'.join(
- self.responses.get(
- path.path, ['']))
- self.send_response(200)
- self.end_headers()
- self.wfile.write(message)
-
- def do_POST(self):
- form = cgi.FieldStorage(
- fp=self.rfile,
- headers=self.headers,
- environ={'REQUEST_METHOD': 'POST',
- 'CONTENT_TYPE': self.headers['Content-Type'],
- })
- data = dict(
- (key, form[key].value) for key in form.keys())
- path = urlparse.urlparse(self.path)
- message = '\n'.join(
- self.responses.get(
- path.path, ''))
-
- login = data.get('login', None)
- #password_salt = data.get('password_salt', None)
- #password_verifier = data.get('password_verifier', None)
-
- if path.geturl() == "/timeout":
- print 'timeout'
- self.send_response(200)
- self.end_headers()
- self.wfile.write(message)
- import time
- time.sleep(10)
- return
-
- ok = (login == "python_test_user")
- if ok:
- self.send_response(200)
- self.end_headers()
- self.wfile.write(message)
-
- else:
- self.send_response(500)
- self.end_headers()
- self.wfile.write(LOGIN_ERROR)
-
-
-if __name__ == "__main__":
- server = HTTPServer((HOST, PORT), request_handler)
- server.serve_forever()
diff --git a/src/leap/gui/tests/test_firstrun_login.py b/src/leap/gui/tests/test_firstrun_login.py
deleted file mode 100644
index 6c45b8ef..00000000
--- a/src/leap/gui/tests/test_firstrun_login.py
+++ /dev/null
@@ -1,212 +0,0 @@
-import sys
-import unittest
-
-import mock
-
-from leap.testing import qunittest
-#from leap.testing import pyqt
-
-from PyQt4 import QtGui
-#from PyQt4 import QtCore
-#import PyQt4.QtCore # some weirdness with mock module
-
-from PyQt4.QtTest import QTest
-from PyQt4.QtCore import Qt
-
-from leap.gui import firstrun
-
-try:
- from collections import OrderedDict
-except ImportError:
- # We must be in 2.6
- from leap.util.dicts import OrderedDict
-
-
-class TestPage(firstrun.login.LogInPage):
- pass
-
-
-class LogInPageLogicTestCase(qunittest.TestCase):
-
- # XXX can spy on signal connections
- __name__ = "register user page logic tests"
-
- def setUp(self):
- self.app = QtGui.QApplication(sys.argv)
- QtGui.qApp = self.app
- self.page = TestPage(None)
- self.page.wizard = mock.MagicMock()
-
- def tearDown(self):
- QtGui.qApp = None
- self.app = None
- self.page = None
-
- def test__do_checks(self):
- eq = self.assertEqual
-
- self.page.userNameLineEdit.setText('testuser@domain')
- self.page.userPasswordLineEdit.setText('testpassword')
-
- # fake register process
- with mock.patch('leap.base.auth.LeapSRPRegister') as mockAuth:
- mockSignup = mock.MagicMock()
-
- reqMockup = mock.Mock()
- # XXX should inject bad json to get error
- reqMockup.content = '{"errors": null}'
- mockSignup.register_user.return_value = (True, reqMockup)
- mockAuth.return_value = mockSignup
- checks = [x for x in self.page._do_checks()]
-
- eq(len(checks), 4)
- labels = [str(x) for (x, y), z in checks]
- eq(labels, ['head_sentinel',
- 'Resolving domain name',
- 'Validating credentials',
- 'end_sentinel'])
- progress = [y for (x, y), z in checks]
- eq(progress, [0, 20, 60, 100])
-
- # normal run, ie, no exceptions
-
- checkfuns = [z for (x, y), z in checks]
- checkusername, resolvedomain, valcreds = checkfuns[:-1]
-
- self.assertTrue(checkusername())
- #self.mocknetchecker.check_name_resolution.assert_called_with(
- #'test_provider1')
-
- self.assertTrue(resolvedomain())
- #self.mockpcertchecker.is_https_working.assert_called_with(
- #"https://test_provider1", verify=True)
-
- self.assertTrue(valcreds())
-
- # XXX missing: inject failing exceptions
- # XXX TODO make it break
-
-
-class RegisterUserPageUITestCase(qunittest.TestCase):
-
- # XXX can spy on signal connections
- __name__ = "Register User Page UI tests"
-
- def setUp(self):
- self.app = QtGui.QApplication(sys.argv)
- QtGui.qApp = self.app
-
- self.pagename = "signup"
- pages = OrderedDict((
- (self.pagename, TestPage),
- ('providersetupvalidation',
- firstrun.connect.ConnectionPage)))
- self.wizard = firstrun.wizard.FirstRunWizard(None, pages_dict=pages)
- self.page = self.wizard.page(self.wizard.get_page_index(self.pagename))
-
- self.page.do_checks = mock.Mock()
-
- # wizard would do this for us
- self.page.initializePage()
-
- def tearDown(self):
- QtGui.qApp = None
- self.app = None
- self.wizard = None
-
- # XXX refactor out
- def fill_field(self, field, text):
- """
- Fills the given field (line edit) with text.
- :param field: the QLineEdit
- :param text: the text to be filled
- :type field: QLineEdit widget
- :type text: str
- """
- keyp = QTest.keyPress
- field.setFocus(True)
- for c in text:
- keyp(field, c)
- self.assertEqual(field.text(), text)
-
- def del_field(self, field):
- """
- deletes the entered text in
- the field line edit
- :param field: the QLineEdit
- :type field: QLineEdit widget
- """
- keyp = QTest.keyPress
- for c in range(len(field.text())):
- keyp(field, Qt.Key_Backspace)
- self.assertEqual(field.text(), "")
-
- def test_buttons_disabled_until_textentry(self):
- # it's a commit button this time
- nextbutton = self.wizard.button(QtGui.QWizard.CommitButton)
-
- self.assertFalse(nextbutton.isEnabled())
-
- f_username = self.page.userNameLineEdit
- f_password = self.page.userPasswordLineEdit
-
- self.fill_field(f_username, "testuser")
- self.fill_field(f_password, "testpassword")
-
- # commit should be enabled
- # XXX Need a workaround here
- # because the isComplete is not being evaluated...
- # (no event loop running??)
- #import ipdb;ipdb.set_trace()
- #self.assertTrue(nextbutton.isEnabled())
- self.assertTrue(self.page.isComplete())
-
- self.del_field(f_username)
- self.del_field(f_password)
-
- # after rm fields commit button
- # should be disabled again
- #self.assertFalse(nextbutton.isEnabled())
- self.assertFalse(self.page.isComplete())
-
- def test_validate_page(self):
- self.assertFalse(self.page.validatePage())
- # XXX TODO MOAR CASES...
- # add errors, False
- # change done, False
- # not done, do_checks called
- # click confirm, True
- # done and do_confirm, True
-
- def test_next_id(self):
- self.assertEqual(self.page.nextId(), 1)
-
- def test_paint_event(self):
- self.page.populateErrors = mock.Mock()
- self.page.paintEvent(None)
- self.page.populateErrors.assert_called_with()
-
- def test_validation_ready(self):
- f_username = self.page.userNameLineEdit
- f_password = self.page.userPasswordLineEdit
-
- self.fill_field(f_username, "testuser")
- self.fill_field(f_password, "testpassword")
-
- self.page.done = True
- self.page.on_checks_validation_ready()
- self.assertFalse(f_username.isEnabled())
- self.assertFalse(f_password.isEnabled())
-
- self.assertEqual(self.page.validationMsg.text(),
- "Credentials validated.")
- self.assertEqual(self.page.do_confirm_next, True)
-
- def test_regex(self):
- # XXX enter invalid username with key presses
- # check text is not updated
- pass
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/src/leap/gui/tests/test_firstrun_providerselect.py b/src/leap/gui/tests/test_firstrun_providerselect.py
deleted file mode 100644
index 18d89010..00000000
--- a/src/leap/gui/tests/test_firstrun_providerselect.py
+++ /dev/null
@@ -1,203 +0,0 @@
-import sys
-import unittest
-
-import mock
-
-from leap.testing import qunittest
-#from leap.testing import pyqt
-
-from PyQt4 import QtGui
-#from PyQt4 import QtCore
-#import PyQt4.QtCore # some weirdness with mock module
-
-from PyQt4.QtTest import QTest
-from PyQt4.QtCore import Qt
-
-from leap.gui import firstrun
-
-try:
- from collections import OrderedDict
-except ImportError:
- # We must be in 2.6
- from leap.util.dicts import OrderedDict
-
-
-class TestPage(firstrun.providerselect.SelectProviderPage):
- pass
-
-
-class SelectProviderPageLogicTestCase(qunittest.TestCase):
-
- # XXX can spy on signal connections
-
- def setUp(self):
- self.app = QtGui.QApplication(sys.argv)
- QtGui.qApp = self.app
- self.page = TestPage(None)
- self.page.wizard = mock.MagicMock()
-
- mocknetchecker = mock.Mock()
- self.page.wizard().netchecker.return_value = mocknetchecker
- self.mocknetchecker = mocknetchecker
-
- mockpcertchecker = mock.Mock()
- self.page.wizard().providercertchecker.return_value = mockpcertchecker
- self.mockpcertchecker = mockpcertchecker
-
- mockeipconfchecker = mock.Mock()
- self.page.wizard().eipconfigchecker.return_value = mockeipconfchecker
- self.mockeipconfchecker = mockeipconfchecker
-
- def tearDown(self):
- QtGui.qApp = None
- self.app = None
- self.page = None
-
- def test__do_checks(self):
- eq = self.assertEqual
-
- self.page.providerNameEdit.setText('test_provider1')
-
- checks = [x for x in self.page._do_checks()]
- eq(len(checks), 5)
- labels = [str(x) for (x, y), z in checks]
- eq(labels, ['head_sentinel',
- 'Checking if it is a valid provider',
- 'Checking for a secure connection',
- 'Getting info from the provider',
- 'end_sentinel'])
- progress = [y for (x, y), z in checks]
- eq(progress, [0, 20, 40, 80, 100])
-
- # normal run, ie, no exceptions
-
- checkfuns = [z for (x, y), z in checks]
- namecheck, httpscheck, fetchinfo = checkfuns[1:-1]
-
- self.assertTrue(namecheck())
- self.mocknetchecker.check_name_resolution.assert_called_with(
- 'test_provider1')
-
- self.assertTrue(httpscheck())
- self.mockpcertchecker.is_https_working.assert_called_with(
- "https://test_provider1", verify=True)
-
- self.assertTrue(fetchinfo())
- self.mockeipconfchecker.fetch_definition.assert_called_with(
- domain="test_provider1")
-
- # XXX missing: inject failing exceptions
- # XXX TODO make it break
-
-
-class SelectProviderPageUITestCase(qunittest.TestCase):
-
- # XXX can spy on signal connections
- __name__ = "Select Provider Page UI tests"
-
- def setUp(self):
- self.app = QtGui.QApplication(sys.argv)
- QtGui.qApp = self.app
-
- self.pagename = "providerselection"
- pages = OrderedDict((
- (self.pagename, TestPage),
- ('providerinfo',
- firstrun.providerinfo.ProviderInfoPage)))
- self.wizard = firstrun.wizard.FirstRunWizard(None, pages_dict=pages)
- self.page = self.wizard.page(self.wizard.get_page_index(self.pagename))
-
- self.page.do_checks = mock.Mock()
-
- # wizard would do this for us
- self.page.initializePage()
-
- def tearDown(self):
- QtGui.qApp = None
- self.app = None
- self.wizard = None
-
- def fill_provider(self):
- """
- fills provider line edit
- """
- keyp = QTest.keyPress
- pedit = self.page.providerNameEdit
- pedit.setFocus(True)
- for c in "testprovider":
- keyp(pedit, c)
- self.assertEqual(pedit.text(), "testprovider")
-
- def del_provider(self):
- """
- deletes the entered provider name
- from the line edit
- """
- keyp = QTest.keyPress
- pedit = self.page.providerNameEdit
- for c in range(len("testprovider")):
- keyp(pedit, Qt.Key_Backspace)
- self.assertEqual(pedit.text(), "")
-
- def test_buttons_disabled_until_textentry(self):
- nextbutton = self.wizard.button(QtGui.QWizard.NextButton)
- checkbutton = self.page.providerCheckButton
-
- self.assertFalse(nextbutton.isEnabled())
- self.assertFalse(checkbutton.isEnabled())
-
- self.fill_provider()
- # checkbutton should be enabled
- self.assertTrue(checkbutton.isEnabled())
- self.assertFalse(nextbutton.isEnabled())
-
- self.del_provider()
- # after rm provider checkbutton disabled again
- self.assertFalse(checkbutton.isEnabled())
- self.assertFalse(nextbutton.isEnabled())
-
- def test_check_button_triggers_tests(self):
- checkbutton = self.page.providerCheckButton
- self.assertFalse(checkbutton.isEnabled())
- self.assertFalse(self.page.do_checks.called)
-
- self.fill_provider()
-
- self.assertTrue(checkbutton.isEnabled())
- mclick = QTest.mouseClick
- # click!
- mclick(checkbutton, Qt.LeftButton)
- self.waitFor(seconds=0.1)
- self.assertTrue(self.page.do_checks.called)
-
- # XXX
- # can play with different side_effects for do_checks mock...
- # so we can see what happens with errors and so on
-
- def test_page_completed_after_checks(self):
- nextbutton = self.wizard.button(QtGui.QWizard.NextButton)
- self.assertFalse(nextbutton.isEnabled())
-
- self.assertFalse(self.page.isComplete())
- self.fill_provider()
- # simulate checks done
- self.page.done = True
- self.page.on_checks_validation_ready()
- self.assertTrue(self.page.isComplete())
- # cannot test for nextbutton enabled
- # because it's the wizard loop
- # that would do that, I think
-
- def test_validate_page(self):
- self.assertTrue(self.page.validatePage())
-
- def test_next_id(self):
- self.assertEqual(self.page.nextId(), 1)
-
- def test_paint_event(self):
- self.page.populateErrors = mock.Mock()
- self.page.paintEvent(None)
- self.page.populateErrors.assert_called_with()
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/src/leap/gui/tests/test_firstrun_register.py b/src/leap/gui/tests/test_firstrun_register.py
deleted file mode 100644
index 9d62f808..00000000
--- a/src/leap/gui/tests/test_firstrun_register.py
+++ /dev/null
@@ -1,244 +0,0 @@
-import sys
-import unittest
-
-import mock
-
-from leap.testing import qunittest
-#from leap.testing import pyqt
-
-from PyQt4 import QtGui
-#from PyQt4 import QtCore
-#import PyQt4.QtCore # some weirdness with mock module
-
-from PyQt4.QtTest import QTest
-from PyQt4.QtCore import Qt
-
-from leap.gui import firstrun
-
-try:
- from collections import OrderedDict
-except ImportError:
- # We must be in 2.6
- from leap.util.dicts import OrderedDict
-
-
-class TestPage(firstrun.register.RegisterUserPage):
-
- def field(self, field):
- if field == "provider_domain":
- return "testprovider"
-
-
-class RegisterUserPageLogicTestCase(qunittest.TestCase):
-
- # XXX can spy on signal connections
- __name__ = "register user page logic tests"
-
- def setUp(self):
- self.app = QtGui.QApplication(sys.argv)
- QtGui.qApp = self.app
- self.page = TestPage(None)
- self.page.wizard = mock.MagicMock()
-
- #mocknetchecker = mock.Mock()
- #self.page.wizard().netchecker.return_value = mocknetchecker
- #self.mocknetchecker = mocknetchecker
-#
- #mockpcertchecker = mock.Mock()
- #self.page.wizard().providercertchecker.return_value = mockpcertchecker
- #self.mockpcertchecker = mockpcertchecker
-#
- #mockeipconfchecker = mock.Mock()
- #self.page.wizard().eipconfigchecker.return_value = mockeipconfchecker
- #self.mockeipconfchecker = mockeipconfchecker
-
- def tearDown(self):
- QtGui.qApp = None
- self.app = None
- self.page = None
-
- def test__do_checks(self):
- eq = self.assertEqual
-
- self.page.userNameLineEdit.setText('testuser')
- self.page.userPasswordLineEdit.setText('testpassword')
- self.page.userPassword2LineEdit.setText('testpassword')
-
- # fake register process
- with mock.patch('leap.base.auth.LeapSRPRegister') as mockAuth:
- mockSignup = mock.MagicMock()
-
- reqMockup = mock.Mock()
- # XXX should inject bad json to get error
- reqMockup.content = '{"errors": null}'
- mockSignup.register_user.return_value = (True, reqMockup)
- mockAuth.return_value = mockSignup
- checks = [x for x in self.page._do_checks()]
-
- eq(len(checks), 3)
- labels = [str(x) for (x, y), z in checks]
- eq(labels, ['head_sentinel',
- 'Registering username',
- 'end_sentinel'])
- progress = [y for (x, y), z in checks]
- eq(progress, [0, 40, 100])
-
- # normal run, ie, no exceptions
-
- checkfuns = [z for (x, y), z in checks]
- passcheck, register = checkfuns[:-1]
-
- self.assertTrue(passcheck())
- #self.mocknetchecker.check_name_resolution.assert_called_with(
- #'test_provider1')
-
- self.assertTrue(register())
- #self.mockpcertchecker.is_https_working.assert_called_with(
- #"https://test_provider1", verify=True)
-
- # XXX missing: inject failing exceptions
- # XXX TODO make it break
-
-
-class RegisterUserPageUITestCase(qunittest.TestCase):
-
- # XXX can spy on signal connections
- __name__ = "Register User Page UI tests"
-
- def setUp(self):
- self.app = QtGui.QApplication(sys.argv)
- QtGui.qApp = self.app
-
- self.pagename = "signup"
- pages = OrderedDict((
- (self.pagename, TestPage),
- ('connect',
- firstrun.connect.ConnectionPage)))
- self.wizard = firstrun.wizard.FirstRunWizard(None, pages_dict=pages)
- self.page = self.wizard.page(self.wizard.get_page_index(self.pagename))
-
- self.page.do_checks = mock.Mock()
-
- # wizard would do this for us
- self.page.initializePage()
-
- def tearDown(self):
- QtGui.qApp = None
- self.app = None
- self.wizard = None
-
- def fill_field(self, field, text):
- """
- Fills the given field (line edit) with text.
- :param field: the QLineEdit
- :param text: the text to be filled
- :type field: QLineEdit widget
- :type text: str
- """
- keyp = QTest.keyPress
- field.setFocus(True)
- for c in text:
- keyp(field, c)
- self.assertEqual(field.text(), text)
-
- def del_field(self, field):
- """
- deletes the entered text in
- the field line edit
- :param field: the QLineEdit
- :type field: QLineEdit widget
- """
- keyp = QTest.keyPress
- for c in range(len(field.text())):
- keyp(field, Qt.Key_Backspace)
- self.assertEqual(field.text(), "")
-
- def test_buttons_disabled_until_textentry(self):
- # it's a commit button this time
- nextbutton = self.wizard.button(QtGui.QWizard.CommitButton)
-
- self.assertFalse(nextbutton.isEnabled())
-
- f_username = self.page.userNameLineEdit
- f_password = self.page.userPasswordLineEdit
- f_passwor2 = self.page.userPassword2LineEdit
-
- self.fill_field(f_username, "testuser")
- self.fill_field(f_password, "testpassword")
- self.fill_field(f_passwor2, "testpassword")
-
- # commit should be enabled
- # XXX Need a workaround here
- # because the isComplete is not being evaluated...
- # (no event loop running??)
- #import ipdb;ipdb.set_trace()
- #self.assertTrue(nextbutton.isEnabled())
- self.assertTrue(self.page.isComplete())
-
- self.del_field(f_username)
- self.del_field(f_password)
- self.del_field(f_passwor2)
-
- # after rm fields commit button
- # should be disabled again
- #self.assertFalse(nextbutton.isEnabled())
- self.assertFalse(self.page.isComplete())
-
- @unittest.skip
- def test_check_button_triggers_tests(self):
- checkbutton = self.page.providerCheckButton
- self.assertFalse(checkbutton.isEnabled())
- self.assertFalse(self.page.do_checks.called)
-
- self.fill_provider()
-
- self.assertTrue(checkbutton.isEnabled())
- mclick = QTest.mouseClick
- # click!
- mclick(checkbutton, Qt.LeftButton)
- self.waitFor(seconds=0.1)
- self.assertTrue(self.page.do_checks.called)
-
- # XXX
- # can play with different side_effects for do_checks mock...
- # so we can see what happens with errors and so on
-
- def test_validate_page(self):
- self.assertFalse(self.page.validatePage())
- # XXX TODO MOAR CASES...
- # add errors, False
- # change done, False
- # not done, do_checks called
- # click confirm, True
- # done and do_confirm, True
-
- def test_next_id(self):
- self.assertEqual(self.page.nextId(), 1)
-
- def test_paint_event(self):
- self.page.populateErrors = mock.Mock()
- self.page.paintEvent(None)
- self.page.populateErrors.assert_called_with()
-
- def test_validation_ready(self):
- f_username = self.page.userNameLineEdit
- f_password = self.page.userPasswordLineEdit
- f_passwor2 = self.page.userPassword2LineEdit
-
- self.fill_field(f_username, "testuser")
- self.fill_field(f_password, "testpassword")
- self.fill_field(f_passwor2, "testpassword")
-
- self.page.done = True
- self.page.on_checks_validation_ready()
- self.assertFalse(f_username.isEnabled())
- self.assertFalse(f_password.isEnabled())
- self.assertFalse(f_passwor2.isEnabled())
-
- self.assertEqual(self.page.validationMsg.text(),
- "Registration succeeded!")
- self.assertEqual(self.page.do_confirm_next, True)
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/src/leap/gui/tests/test_firstrun_wizard.py b/src/leap/gui/tests/test_firstrun_wizard.py
deleted file mode 100644
index 395604d3..00000000
--- a/src/leap/gui/tests/test_firstrun_wizard.py
+++ /dev/null
@@ -1,137 +0,0 @@
-import sys
-import unittest
-
-import mock
-
-from leap.testing import qunittest
-from leap.testing import pyqt
-
-from PyQt4 import QtGui
-#from PyQt4 import QtCore
-import PyQt4.QtCore # some weirdness with mock module
-
-from PyQt4.QtTest import QTest
-#from PyQt4.QtCore import Qt
-
-from leap.gui import firstrun
-
-
-class TestWizard(firstrun.wizard.FirstRunWizard):
- pass
-
-
-PAGES_DICT = dict((
- ('intro', firstrun.intro.IntroPage),
- ('providerselection',
- firstrun.providerselect.SelectProviderPage),
- ('login', firstrun.login.LogInPage),
- ('providerinfo', firstrun.providerinfo.ProviderInfoPage),
- ('providersetupvalidation',
- firstrun.providersetup.ProviderSetupValidationPage),
- ('signup', firstrun.register.RegisterUserPage),
- ('connect',
- firstrun.connect.ConnectionPage),
- ('lastpage', firstrun.last.LastPage)
-))
-
-
-mockQSettings = mock.MagicMock()
-mockQSettings().setValue.return_value = True
-
-#PyQt4.QtCore.QSettings = mockQSettings
-
-
-class FirstRunWizardTestCase(qunittest.TestCase):
-
- # XXX can spy on signal connections
-
- def setUp(self):
- self.app = QtGui.QApplication(sys.argv)
- QtGui.qApp = self.app
- self.wizard = TestWizard(None)
-
- def tearDown(self):
- QtGui.qApp = None
- self.app = None
- self.wizard = None
-
- def test_defaults(self):
- self.assertEqual(self.wizard.pages_dict, PAGES_DICT)
-
- @mock.patch('PyQt4.QtCore.QSettings', mockQSettings)
- def test_accept(self):
- """
- Test the main accept method,
- which gets called when the user has gone
- through the whole wizard and clicked the finish button.
- """
-
- self.wizard.success_cb = mock.Mock()
- self.wizard.success_cb.return_value = True
-
- # dummy values; we inject them in the field
- # mocks (where wizard gets them) and then
- # we check that they are passed to QSettings.setValue
- field_returns = ["testuser", "1234", "testprovider", True]
-
- def field_side_effects(*args):
- return field_returns.pop(0)
-
- self.wizard.field = mock.Mock(side_effect=field_side_effects)
- self.wizard.get_random_str = mock.Mock()
- RANDOMSTR = "thisisarandomstringTM"
- self.wizard.get_random_str.return_value = RANDOMSTR
-
- # mocked settings (see decorator on this method)
- mqs = PyQt4.QtCore.QSettings
-
- # go! call accept...
- self.wizard.accept()
-
- # did settings().setValue get called with the proper
- # arguments?
- call = mock.call
- calls = [call("FirstRunWizardDone", True),
- call("provider_domain", "testprovider"),
- call("remember_user_and_pass", True),
- call("username", "testuser@testprovider"),
- call("testprovider_seed", RANDOMSTR)]
- mqs().setValue.assert_has_calls(calls, any_order=True)
-
- # assert that the success callback was called
- self.wizard.success_cb.assert_called_with()
-
- def test_random_str(self):
- r = self.wizard.get_random_str(42)
- self.assertTrue(len(r) == 42)
-
- def test_page_index(self):
- """
- we test both the get_page_index function
- and the correct ordering of names
- """
- # remember it's implemented as an ordered dict
-
- pagenames = ('intro', 'providerselection', 'login', 'providerinfo',
- 'providersetupvalidation', 'signup', 'connect',
- 'lastpage')
- eq = self.assertEqual
- w = self.wizard
- for index, name in enumerate(pagenames):
- eq(w.get_page_index(name), index)
-
- def test_validation_errors(self):
- """
- tests getters and setters for validation errors
- """
- page = "testpage"
- eq = self.assertEqual
- w = self.wizard
- eq(w.get_validation_error(page), None)
- w.set_validation_error(page, "error")
- eq(w.get_validation_error(page), "error")
- w.clean_validation_error(page)
- eq(w.get_validation_error(page), None)
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/src/leap/gui/tests/test_mainwindow_rc.py b/src/leap/gui/tests/test_mainwindow_rc.py
deleted file mode 100644
index 9f5172f7..00000000
--- a/src/leap/gui/tests/test_mainwindow_rc.py
+++ /dev/null
@@ -1,32 +0,0 @@
-import unittest
-import hashlib
-
-try:
- import sip
- sip.setapi('QVariant', 2)
-except ValueError:
- pass
-
-from leap.gui import mainwindow_rc
-
-# I have to admit that there's something
-# perverse in testing this.
-# Even so, I still think that it _is_ a good idea
-# to have a check that guards against non-updated resource files.
-
-# So, if you came here because an updated resource
-# broke this test, what you have to do is get
-# the md5 hash of your qt_resource_data and change it here.
-
-# Annoying? yep. Try making a script for that :P
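Such a script can be as small as the following sketch; run it against the updated resources and paste the output into the assertion below:

    import hashlib
    from leap.gui import mainwindow_rc
    print hashlib.md5(mainwindow_rc.qt_resource_data).hexdigest()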
-
-
-class MainWindowResourcesTest(unittest.TestCase):
-
- def test_mainwindow_resources_hash(self):
- self.assertEqual(
- hashlib.md5(mainwindow_rc.qt_resource_data).hexdigest(),
- 'e04cb467985ba38b9eb91e7689f9458f')
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/src/leap/gui/tests/test_progress.py b/src/leap/gui/tests/test_progress.py
deleted file mode 100644
index 1f9f9e38..00000000
--- a/src/leap/gui/tests/test_progress.py
+++ /dev/null
@@ -1,449 +0,0 @@
-from collections import namedtuple
-import sys
-import unittest
-import Queue
-
-import mock
-
-from leap.testing import qunittest
-from leap.testing import pyqt
-
-from PyQt4 import QtGui
-from PyQt4 import QtCore
-from PyQt4.QtTest import QTest
-from PyQt4.QtCore import Qt
-
-from leap.gui import progress
-
-
-class ProgressStepTestCase(unittest.TestCase):
-
- def test_step_attrs(self):
- ps = progress.ProgressStep
- step = ps('test', False, 1)
- # instance
- self.assertEqual(step.index, 1)
- self.assertEqual(step.name, "test")
- self.assertEqual(step.done, False)
- step = ps('test2', True, 2)
- self.assertEqual(step.index, 2)
- self.assertEqual(step.name, "test2")
- self.assertEqual(step.done, True)
-
- # class methods and attrs
- self.assertEqual(ps.columns(), ('name', 'done'))
- self.assertEqual(ps.NAME, 0)
- self.assertEqual(ps.DONE, 1)
-
-
-class ProgressStepContainerTestCase(unittest.TestCase):
- def setUp(self):
- self.psc = progress.ProgressStepContainer()
-
- def addSteps(self, number):
- Step = progress.ProgressStep
- for n in range(number):
- self.psc.addStep(Step("%s" % n, False, n))
-
- def test_attrs(self):
- self.assertEqual(self.psc.columns,
- ('name', 'done'))
-
- def test_add_steps(self):
- Step = progress.ProgressStep
- self.assertTrue(len(self.psc) == 0)
- self.psc.addStep(Step('one', False, 0))
- self.assertTrue(len(self.psc) == 1)
- self.psc.addStep(Step('two', False, 1))
- self.assertTrue(len(self.psc) == 2)
-
- def test_del_all_steps(self):
- self.assertTrue(len(self.psc) == 0)
- self.addSteps(5)
- self.assertTrue(len(self.psc) == 5)
- self.psc.removeAllSteps()
- self.assertTrue(len(self.psc) == 0)
-
- def test_del_step(self):
- Step = progress.ProgressStep
- self.addSteps(5)
- self.assertTrue(len(self.psc) == 5)
- self.psc.removeStep(self.psc.step(4))
- self.assertTrue(len(self.psc) == 4)
- self.psc.removeStep(self.psc.step(4))
- self.psc.removeStep(Step('none', False, 5))
- self.psc.removeStep(self.psc.step(4))
-
- def test_iter(self):
- self.addSteps(10)
- self.assertEqual(
- [x.index for x in self.psc],
- [x for x in range(10)])
-
-
-class StepsTableWidgetTestCase(unittest.TestCase):
-
- def setUp(self):
- self.app = QtGui.QApplication(sys.argv)
- QtGui.qApp = self.app
- self.stw = progress.StepsTableWidget()
-
- def tearDown(self):
- QtGui.qApp = None
- self.app = None
-
- def test_defaults(self):
- self.assertTrue(isinstance(self.stw, QtGui.QTableWidget))
- self.assertEqual(self.stw.focusPolicy(), 0)
-
-
-class TestWithStepsClass(QtGui.QWidget, progress.WithStepsMixIn):
-
- def __init__(self, parent=None):
- super(TestWithStepsClass, self).__init__(parent=parent)
- self.setupStepsProcessingQueue()
- self.statuses = []
- self.current_page = "testpage"
-
- def onStepStatusChanged(self, *args):
- """
- blank out this gui method
- that will add status lines
- """
- self.statuses.append(args)
-
-
-class WithStepsMixInTestCase(qunittest.TestCase):
-
- TIMER_WAIT = 2 * progress.WithStepsMixIn.STEPS_TIMER_MS / 1000.0
-
- # XXX can spy on signal connections
-
- def setUp(self):
- self.app = QtGui.QApplication(sys.argv)
- QtGui.qApp = self.app
- self.stepy = TestWithStepsClass()
- #self.connects = []
- #pyqt.enableSignalDebugging(
- #connectCall=lambda *args: self.connects.append(args))
- #self.assertEqual(self.connects, [])
- #self.stepy.stepscheck_timer.timeout.disconnect(
- #self.stepy.processStepsQueue)
-
- def tearDown(self):
- QtGui.qApp = None
- self.app = None
-
- def test_has_queue(self):
- s = self.stepy
- self.assertTrue(hasattr(s, 'steps_queue'))
- self.assertTrue(isinstance(s.steps_queue, Queue.Queue))
- self.assertTrue(isinstance(s.stepscheck_timer, QtCore.QTimer))
-
- def test_do_checks_delegation(self):
- s = self.stepy
-
- _do_checks = mock.Mock()
- _do_checks.return_value = (
- (("test", 0), lambda: None),
- (("test", 0), lambda: None))
- s._do_checks = _do_checks
- s.do_checks()
- self.waitFor(seconds=self.TIMER_WAIT)
- _do_checks.assert_called_with()
- self.assertEqual(len(s.statuses), 2)
-
- # test that a failed test interrupts the run
-
- s.statuses = []
- _do_checks = mock.Mock()
- _do_checks.return_value = (
- (("test", 0), lambda: None),
- (("test", 0), lambda: False),
- (("test", 0), lambda: None))
- s._do_checks = _do_checks
- s.do_checks()
- self.waitFor(seconds=self.TIMER_WAIT)
- _do_checks.assert_called_with()
- self.assertEqual(len(s.statuses), 2)
-
- def test_process_queue(self):
- s = self.stepy
- q = s.steps_queue
- s.set_failed_icon = mock.MagicMock()
- with self.assertRaises(AssertionError):
- q.put('foo')
- self.waitFor(seconds=self.TIMER_WAIT)
- s.set_failed_icon.assert_called_with()
- q.put("failed")
- self.waitFor(seconds=self.TIMER_WAIT)
- s.set_failed_icon.assert_called_with()
-
- def test_on_checks_validation_ready_called(self):
- s = self.stepy
- s.on_checks_validation_ready = mock.MagicMock()
-
- _do_checks = mock.Mock()
- _do_checks.return_value = (
- (("test", 0), lambda: None),)
- s._do_checks = _do_checks
- s.do_checks()
-
- self.waitFor(seconds=self.TIMER_WAIT)
- s.on_checks_validation_ready.assert_called_with()
-
- def test_fail(self):
- s = self.stepy
-
- s.wizard = mock.Mock()
- wizard = s.wizard.return_value
- wizard.set_validation_error.return_value = True
- s.completeChanged = mock.Mock()
- s.completeChanged.emit.return_value = True
-
- self.assertFalse(s.fail(err="foo"))
- self.waitFor(seconds=self.TIMER_WAIT)
- wizard.set_validation_error.assert_called_with('testpage', 'foo')
- s.completeChanged.emit.assert_called_with()
-
- # with no args
- s.wizard = mock.Mock()
- wizard = s.wizard.return_value
- wizard.set_validation_error.return_value = True
- s.completeChanged = mock.Mock()
- s.completeChanged.emit.return_value = True
-
- self.assertFalse(s.fail())
- self.waitFor(seconds=self.TIMER_WAIT)
- with self.assertRaises(AssertionError):
- wizard.set_validation_error.assert_called_with()
- s.completeChanged.emit.assert_called_with()
-
- def test_done(self):
- s = self.stepy
- s.done = False
-
- s.completeChanged = mock.Mock()
- s.completeChanged.emit.return_value = True
-
- self.assertFalse(s.is_done())
- s.set_done()
- self.assertTrue(s.is_done())
- s.completeChanged.emit.assert_called_with()
-
- s.completeChanged = mock.Mock()
- s.completeChanged.emit.return_value = True
- s.set_undone()
- self.assertFalse(s.is_done())
-
- def test_back_and_next(self):
- s = self.stepy
- s.wizard = mock.Mock()
- wizard = s.wizard.return_value
- wizard.back.return_value = True
- wizard.next.return_value = True
- s.go_back()
- wizard.back.assert_called_with()
- s.go_next()
- wizard.next.assert_called_with()
-
- def test_on_step_statuschanged_slot(self):
- s = self.stepy
- s.onStepStatusChanged = progress.WithStepsMixIn.onStepStatusChanged
- s.add_status_line = mock.Mock()
- s.set_checked_icon = mock.Mock()
- s.progress = mock.Mock()
- s.progress.setValue.return_value = True
- s.progress.update.return_value = True
-
- s.onStepStatusChanged(s, "end_sentinel")
- s.set_checked_icon.assert_called_with()
-
- s.onStepStatusChanged(s, "foo")
- s.add_status_line.assert_called_with("foo")
-
- s.onStepStatusChanged(s, "bar", 42)
- s.progress.setValue.assert_called_with(42)
- s.progress.update.assert_called_with()
-
- def test_steps_and_errors(self):
- s = self.stepy
- s.setupSteps()
- self.assertTrue(isinstance(s.steps, progress.ProgressStepContainer))
- self.assertEqual(s.errors, {})
- s.set_error('fooerror', 'barerror')
- self.assertEqual(s.errors, {'fooerror': 'barerror'})
- s.set_error('2', 42)
- self.assertEqual(s.errors, {'fooerror': 'barerror', '2': 42})
- fe = s.pop_first_error()
- self.assertEqual(fe, ('fooerror', 'barerror'))
- self.assertEqual(s.errors, {'2': 42})
- s.clean_errors()
- self.assertEqual(s.errors, {})
-
- def test_launch_chechs_slot(self):
- s = self.stepy
- s.do_checks = mock.Mock()
- s.launch_checks()
- s.do_checks.assert_called_with()
-
- def test_clean_wizard_errors(self):
- s = self.stepy
- s.wizard = mock.Mock()
- wizard = s.wizard.return_value
- wizard.set_validation_error.return_value = True
- s.clean_wizard_errors(pagename="foopage")
- wizard.set_validation_error.assert_called_with("foopage", None)
-
- def test_clear_table(self):
- s = self.stepy
- s.stepsTableWidget = mock.Mock()
- s.stepsTableWidget.clearContents.return_value = True
- s.clearTable()
- s.stepsTableWidget.clearContents.assert_called_with()
-
- def test_populate_steps_table(self):
- s = self.stepy
- Step = namedtuple('Step', ['name', 'done'])
-
- class Steps(object):
- columns = ("name", "done")
- _items = (Step('step1', False), Step('step2', False))
-
- def __len__(self):
- return 2
-
- def __iter__(self):
- for i in self._items:
- yield i
-
- s.steps = Steps()
-
- s.stepsTableWidget = mock.Mock()
- s.stepsTableWidget.setItem.return_value = True
- s.resizeTable = mock.Mock()
- s.update = mock.Mock()
- s.populateStepsTable()
- s.update.assert_called_with()
- s.resizeTable.assert_called_with()
-
- # assert stepsTableWidget.setItem called ...
- # we do not want to get into the actual
- # <QTableWidgetItem object at 0x92a565c>
- call_list = s.stepsTableWidget.setItem.call_args_list
- indexes = [(y, z) for y, z, xx in [x[0] for x in call_list]]
- self.assertEqual(indexes,
- [(0, 0), (0, 1), (1, 0), (1, 1)])
-
- def test_add_status_line(self):
- s = self.stepy
- s.steps = progress.ProgressStepContainer()
- s.stepsTableWidget = mock.Mock()
- s.stepsTableWidget.width.return_value = 100
- s.set_item = mock.Mock()
- s.set_item_icon = mock.Mock()
- s.add_status_line("new status")
- s.set_item_icon.assert_called_with(current=False)
-
- def test_set_item_icon(self):
- s = self.stepy
- s.steps = progress.ProgressStepContainer()
- s.stepsTableWidget = mock.Mock()
- s.stepsTableWidget.setCellWidget.return_value = True
- s.stepsTableWidget.width.return_value = 100
- #s.set_item = mock.Mock()
- #s.set_item_icon = mock.Mock()
- s.add_status_line("new status")
- s.add_status_line("new 2 status")
- s.add_status_line("new 3 status")
- call_list = s.stepsTableWidget.setCellWidget.call_args_list
- indexes = [(y, z) for y, z, xx in [x[0] for x in call_list]]
- self.assertEqual(
- indexes,
- [(0, 1), (-1, 1), (1, 1), (0, 1), (2, 1), (1, 1)])
-
-
-class TestInlineValidationPage(progress.InlineValidationPage):
- pass
-
-
-class InlineValidationPageTestCase(unittest.TestCase):
-
- def setUp(self):
- self.app = QtGui.QApplication(sys.argv)
- QtGui.qApp = self.app
- self.page = TestInlineValidationPage()
-
- def tearDown(self):
- QtGui.qApp = None
- self.app = None
-
- def test_defaults(self):
- self.assertFalse(self.page.done)
- # if setupProcessingQueue was called
- self.assertTrue(isinstance(self.page.stepscheck_timer, QtCore.QTimer))
- self.assertTrue(isinstance(self.page.steps_queue, Queue.Queue))
-
- def test_validation_frame(self):
- # test frame creation
- self.page.stepsTableWidget = progress.StepsTableWidget(
- parent=self.page)
- self.page.setupValidationFrame()
- self.assertTrue(isinstance(self.page.valFrame, QtGui.QFrame))
-
- # test show steps calls frame.show
- self.page.valFrame = mock.Mock()
- self.page.valFrame.show.return_value = True
- self.page.showStepsFrame()
- self.page.valFrame.show.assert_called_with()
-
-
-class TestValidationPage(progress.ValidationPage):
- pass
-
-
-class ValidationPageTestCase(unittest.TestCase):
-
- def setUp(self):
- self.app = QtGui.QApplication(sys.argv)
- QtGui.qApp = self.app
- self.page = TestValidationPage()
-
- def tearDown(self):
- QtGui.qApp = None
- self.app = None
-
- def test_defaults(self):
- self.assertFalse(self.page.done)
- # if setupProcessingQueue was called
- self.assertTrue(isinstance(self.page.timer, QtCore.QTimer))
- self.assertTrue(isinstance(self.page.stepscheck_timer, QtCore.QTimer))
- self.assertTrue(isinstance(self.page.steps_queue, Queue.Queue))
-
- def test_is_complete(self):
- self.assertFalse(self.page.isComplete())
- self.page.done = True
- self.assertTrue(self.page.isComplete())
- self.page.done = False
- self.assertFalse(self.page.isComplete())
-
- def test_show_hide_progress(self):
- p = self.page
- p.progress = mock.Mock()
- p.progress.show.return_code = True
- p.show_progress()
- p.progress.show.assert_called_with()
- p.progress.hide.return_code = True
- p.hide_progress()
- p.progress.hide.assert_called_with()
-
- def test_initialize_page(self):
- p = self.page
- p.timer = mock.Mock()
- p.timer.singleShot.return_code = True
- p.initializePage()
- p.timer.singleShot.assert_called_with(0, p.do_checks)
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/src/leap/gui/tests/test_threads.py b/src/leap/gui/tests/test_threads.py
deleted file mode 100644
index 06c19606..00000000
--- a/src/leap/gui/tests/test_threads.py
+++ /dev/null
@@ -1,27 +0,0 @@
-import unittest
-
-import mock
-from leap.gui import threads
-
-
-class FunThreadTestCase(unittest.TestCase):
-
- def setUp(self):
- self.fun = mock.MagicMock()
- self.fun.return_value = "foo"
- self.t = threads.FunThread(fun=self.fun)
-
- def test_thread(self):
- self.t.begin()
- self.t.wait()
- self.fun.assert_called()
- del self.t
-
- def test_run(self):
- # this is called by PyQt
- self.t.run()
- del self.t
- self.fun.assert_called()
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/src/leap/gui/threads.py b/src/leap/gui/threads.py
deleted file mode 100644
index 8aad8866..00000000
--- a/src/leap/gui/threads.py
+++ /dev/null
@@ -1,21 +0,0 @@
-from PyQt4 import QtCore
-
-
-class FunThread(QtCore.QThread):
-
- def __init__(self, fun=None, parent=None):
-
- QtCore.QThread.__init__(self, parent)
- self.exiting = False
- self.fun = fun
-
- def __del__(self):
- self.exiting = True
- self.wait()
-
- def run(self):
- if self.fun:
- self.fun()
-
- def begin(self):
- self.start()
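
A minimal usage sketch of the FunThread wrapper removed above; the slow_task callable and the QCoreApplication setup are illustrative, mirroring how the deleted test_threads.py drives the thread:

    from PyQt4 import QtCore
    from leap.gui.threads import FunThread

    app = QtCore.QCoreApplication([])  # a Qt application object, as in the tests

    def slow_task():
        # stand-in for any blocking work that should stay off the GUI thread
        print "working in the background"

    t = FunThread(fun=slow_task)
    t.begin()  # simply delegates to QThread.start()
    t.wait()   # block until run() has finished calling slow_task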
diff --git a/src/leap/gui/utils.py b/src/leap/gui/utils.py
deleted file mode 100644
index f91ac3ef..00000000
--- a/src/leap/gui/utils.py
+++ /dev/null
@@ -1,34 +0,0 @@
-"""
-utility functions to work with gui objects
-"""
-from PyQt4 import QtCore
-
-
-def layout_widgets(layout):
- """
- return a generator with all widgets in a layout
- """
- return (layout.itemAt(i) for i in range(layout.count()))
-
-
-DELAY_MSECS = 50
-
-
-def delay(obj, method_str=None, call_args=None):
- """
- Triggers a function or slot with a small delay.
- This is mainly a hack to get responsiveness in the UI
- in cases in which the event loop freezes and the task
- is not heavy enough to set up a processing queue.
- """
- if callable(obj) and not method_str:
- fun = lambda: obj()
-
- if method_str:
- invoke = QtCore.QMetaObject.invokeMethod
- if call_args:
- fun = lambda: invoke(obj, method_str, call_args)
- else:
- fun = lambda: invoke(obj, method_str)
-
- QtCore.QTimer().singleShot(DELAY_MSECS, fun)
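
A short, hedged sketch of how the delay() helper above is used; the label widget and the "clear" slot are illustrative, and an event loop (app.exec_()) is needed for the timers to actually fire:

    import sys
    from PyQt4 import QtGui
    from leap.gui.utils import delay

    app = QtGui.QApplication(sys.argv)
    label = QtGui.QLabel("hello")

    # defer a plain callable by DELAY_MSECS so the event loop can repaint first
    delay(lambda: label.setText("updated a little later"))

    # or invoke a named Qt slot through QMetaObject.invokeMethod
    delay(label, "clear")

    label.show()
    app.exec_()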
diff --git a/src/leap/soledad/README b/src/leap/soledad/README
deleted file mode 100644
index b14d5932..00000000
--- a/src/leap/soledad/README
+++ /dev/null
@@ -1,35 +0,0 @@
-Soledad -- Synchronization Of Locally Encrypted Data Among Devices
-==================================================================
-
-This software is under development.
-
-Dependencies
-------------
-
-Soledad depends on the following python libraries:
-
- * u1db 0.1.4 [1]
- * python-gnupg 0.3.1 [2]
- * CouchDB 0.8 [3]
- * hmac 20101005 [4]
-
-[1] http://pypi.python.org/pypi/u1db/0.1.4
-[2] http://pypi.python.org/pypi/python-gnupg/0.3.1
-[3] http://pypi.python.org/pypi/CouchDB/0.8
-[4] http://pypi.python.org/pypi/hmac/20101005
-
-
-Tests
------
-
-Soledad's tests should be run with nose2, like this:
-
- nose2 leap.soledad.tests
-
-Right now, there are 3 conditions that have to be met for all Soledad tests to
-pass without problems:
-
- 1. Use nose2.
- 2. Have an http CouchDB instance running on `localhost:5984`.
- 3. Have sqlcipher configured (using LD_PRELOAD or LD_LIBRARY_PATH to point
- to the place where libsqlite3.so.0 is located).
diff --git a/src/leap/soledad/__init__.py b/src/leap/soledad/__init__.py
deleted file mode 100644
index 4b7a12df..00000000
--- a/src/leap/soledad/__init__.py
+++ /dev/null
@@ -1,221 +0,0 @@
-# License?
-
-"""A U1DB implementation for using Object Stores as its persistence layer."""
-
-import os
-import string
-import random
-import hmac
-from leap.soledad.backends import sqlcipher
-from leap.soledad.util import GPGWrapper
-
-
-class Soledad(object):
-
- # paths
- PREFIX = os.environ['HOME'] + '/.config/leap/soledad'
- SECRET_PATH = PREFIX + '/secret.gpg'
- GNUPG_HOME = PREFIX + '/gnupg'
- LOCAL_DB_PATH = PREFIX + '/soledad.u1db'
-
- # other configs
- SECRET_LENGTH = 50
-
- def __init__(self, user_email, gpghome=None, initialize=True,
- prefix=None, secret_path=None, local_db_path=None):
- self._user_email = user_email
- self.PREFIX = prefix or self.PREFIX
- self.SECRET_PATH = secret_path or self.SECRET_PATH
- self.LOCAL_DB_PATH = local_db_path or self.LOCAL_DB_PATH
- if not os.path.isdir(self.PREFIX):
- os.makedirs(self.PREFIX)
- self._gpg = GPGWrapper(gpghome=(gpghome or self.GNUPG_HOME))
- if initialize:
- self._initialize()
-
- def _initialize(self):
- # load/generate OpenPGP keypair
- if not self._has_openpgp_keypair():
- self._gen_openpgp_keypair()
- self._load_openpgp_keypair()
- # load/generate secret
- if not self._has_secret():
- self._gen_secret()
- self._load_secret()
- # instantiate u1db
- # TODO: verify if secret for sqlcipher should be the same as the one
- # for symmetric encryption.
- self._db = sqlcipher.open(self.LOCAL_DB_PATH, True, self._secret,
- soledad=self)
-
- def close(self):
- self._db.close()
-
- #-------------------------------------------------------------------------
- # Management of secret for symmetric encryption
- #-------------------------------------------------------------------------
-
- def _has_secret(self):
- """
- Verify if secret for symmetric encryption exists on local encrypted
- file.
- """
- # TODO: verify if file is a GPG-encrypted file and if we have the
- # corresponding private key for decryption.
- if os.path.isfile(self.SECRET_PATH):
- return True
- return False
-
- def _load_secret(self):
- """
- Load secret for symmetric encryption from local encrypted file.
- """
- try:
- with open(self.SECRET_PATH) as f:
- self._secret = str(self._gpg.decrypt(f.read()))
- except IOError:
- raise IOError('Failed to open secret file %s.' % self.SECRET_PATH)
-
- def _gen_secret(self):
- """
- Generate a secret for symmetric encryption and store it in a local
- encrypted file.
- """
- self._secret = ''.join(random.choice(string.ascii_uppercase +
- string.digits) for x in
- range(self.SECRET_LENGTH))
- ciphertext = self._gpg.encrypt(self._secret, self._fingerprint,
- self._fingerprint)
- f = open(self.SECRET_PATH, 'w')
- f.write(str(ciphertext))
- f.close()
-
- #-------------------------------------------------------------------------
- # Management of OpenPGP keypair
- #-------------------------------------------------------------------------
-
- def _has_openpgp_keypair(self):
- """
- Verify if there exists an OpenPGP keypair for this user.
- """
- # TODO: verify if we have the corresponding private key.
- try:
- self._gpg.find_key(self._user_email)
- return True
- except LookupError:
- return False
-
- def _gen_openpgp_keypair(self):
- """
- Generate an OpenPGP keypair for this user.
- """
- params = self._gpg.gen_key_input(
- key_type='RSA',
- key_length=4096,
- name_real=self._user_email,
- name_email=self._user_email,
- name_comment='Generated by LEAP Soledad.')
- self._gpg.gen_key(params)
-
- def _load_openpgp_keypair(self):
- """
- Find fingerprint for this user's OpenPGP keypair.
- """
- self._fingerprint = self._gpg.find_key(self._user_email)['fingerprint']
-
- def publish_pubkey(self, keyserver):
- """
- Publish OpenPGP public key to a keyserver.
- """
- # TODO: this has to talk to LEAP's Nickserver.
- pass
-
- #-------------------------------------------------------------------------
- # Data encryption and decryption
- #-------------------------------------------------------------------------
-
- def encrypt(self, data, sign=None, passphrase=None, symmetric=False):
- """
- Encrypt data.
- """
- return str(self._gpg.encrypt(data, self._fingerprint, sign=sign,
- passphrase=passphrase,
- symmetric=symmetric))
-
- def encrypt_symmetric(self, doc_id, data, sign=None):
- """
- Encrypt data using symmetric secret.
- """
- h = hmac.new(self._secret, doc_id).hexdigest()
- return self.encrypt(data, sign=sign, passphrase=h, symmetric=True)
-
- def decrypt(self, data, passphrase=None, symmetric=False):
- """
- Decrypt data.
- """
- return str(self._gpg.decrypt(data, passphrase=passphrase))
-
- def decrypt_symmetric(self, doc_id, data):
- """
- Decrypt data using symmetric secret.
- """
- h = hmac.new(self._secret, doc_id).hexdigest()
- return self.decrypt(data, passphrase=h)
-
- #-------------------------------------------------------------------------
- # Document storage, retrieval and sync
- #-------------------------------------------------------------------------
-
- def put_doc(self, doc):
- """
- Update a document in the local encrypted database.
- """
- return self._db.put_doc(doc)
-
- def delete_doc(self, doc):
- """
- Delete a document from the local encrypted database.
- """
- return self._db.delete_doc(doc)
-
- def get_doc(self, doc_id, include_deleted=False):
- """
- Retrieve a document from the local encrypted database.
- """
- return self._db.get_doc(doc_id, include_deleted=include_deleted)
-
- def get_docs(self, doc_ids, check_for_conflicts=True,
- include_deleted=False):
- """
- Get the content for many documents.
- """
- return self._db.get_docs(doc_ids,
- check_for_conflicts=check_for_conflicts,
- include_deleted=include_deleted)
-
- def create_doc(self, content, doc_id=None):
- """
- Create a new document in the local encrypted database.
- """
- return self._db.create_doc(content, doc_id=doc_id)
-
- def get_doc_conflicts(self, doc_id):
- """
- Get the list of conflicts for the given document.
- """
- return self._db.get_doc_conflicts(doc_id)
-
- def resolve_doc(self, doc, conflicted_doc_revs):
- """
- Mark a document as no longer conflicted.
- """
- return self._db.resolve_doc(doc, conflicted_doc_revs)
-
- def sync(self, url):
- """
- Synchronize the local encrypted database with LEAP server.
- """
- # TODO: create authentication scheme for sync with server.
- return self._db.sync(url, creds=None, autocreate=True)
-
-__all__ = ['util']
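
As a rough illustration of the API being removed, a hedged usage sketch; the email address and sync URL are placeholders, and a working GnuPG setup plus the sqlcipher backend are assumed:

    from leap.soledad import Soledad

    # keys, secret and the local sqlcipher db default to ~/.config/leap/soledad
    s = Soledad('user@example.org')
    doc = s.create_doc({'subject': 'hello'})
    print s.get_doc(doc.doc_id).content

    # synchronize the local encrypted replica with a (hypothetical) LEAP server
    s.sync('https://example.org/soledad/user')
    s.close()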
diff --git a/src/leap/soledad/backends/__init__.py b/src/leap/soledad/backends/__init__.py
deleted file mode 100644
index 72907f37..00000000
--- a/src/leap/soledad/backends/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-import objectstore
-
-
-__all__ = [
- 'objectstore']
diff --git a/src/leap/soledad/backends/couch.py b/src/leap/soledad/backends/couch.py
deleted file mode 100644
index d349efaf..00000000
--- a/src/leap/soledad/backends/couch.py
+++ /dev/null
@@ -1,269 +0,0 @@
-# general imports
-import uuid
-from base64 import b64encode, b64decode
-import re
-# u1db
-from u1db import errors
-from u1db.sync import LocalSyncTarget
-from u1db.backends.inmemory import InMemoryIndex
-from u1db.remote.server_state import ServerState
-from u1db.errors import DatabaseDoesNotExist
-# couchdb
-from couchdb.client import Server, Document as CouchDocument
-from couchdb.http import ResourceNotFound
-# leap
-from leap.soledad.backends.objectstore import ObjectStore
-from leap.soledad.backends.leap_backend import LeapDocument
-
-try:
- import simplejson as json
-except ImportError:
- import json # noqa
-
-
-class InvalidURLError(Exception):
- pass
-
-
-class CouchDatabase(ObjectStore):
- """A U1DB implementation that uses Couch as its persistence layer."""
-
- @classmethod
- def open_database(cls, url, create):
- # get database from url
- m = re.match('(^https?://[^/]+)/(.+)$', url)
- if not m:
- raise InvalidURLError
- url = m.group(1)
- dbname = m.group(2)
- server = Server(url=url)
- try:
- server[dbname]
- except ResourceNotFound:
- if not create:
- raise DatabaseDoesNotExist()
- return cls(url, dbname)
-
- def __init__(self, url, database, replica_uid=None, full_commit=True,
- session=None):
- """Create a new Couch data container."""
- self._url = url
- self._full_commit = full_commit
- self._session = session
- self._server = Server(url=self._url,
- full_commit=self._full_commit,
- session=self._session)
- self._dbname = database
- # this will ensure that transaction and sync logs exist and are
- # up-to-date.
- try:
- self._database = self._server[database]
- except ResourceNotFound:
- self._server.create(database)
- self._database = self._server[database]
- super(CouchDatabase, self).__init__(replica_uid=replica_uid,
- document_factory=LeapDocument)
-
- #-------------------------------------------------------------------------
- # methods from Database
- #-------------------------------------------------------------------------
-
- def _get_doc(self, doc_id, check_for_conflicts=False):
- """
- Get just the document content, without fancy handling.
- """
- cdoc = self._database.get(doc_id)
- if cdoc is None:
- return None
- has_conflicts = False
- if check_for_conflicts:
- has_conflicts = self._has_conflicts(doc_id)
- doc = self._factory(
- doc_id=doc_id,
- rev=cdoc['u1db_rev'],
- has_conflicts=has_conflicts)
- contents = self._database.get_attachment(cdoc, 'u1db_json')
- if contents:
- doc.content = json.loads(contents.getvalue())
- else:
- doc.make_tombstone()
- return doc
-
- def get_all_docs(self, include_deleted=False):
- """Get all documents from the database."""
- generation = self._get_generation()
- results = []
- for doc_id in self._database:
- if doc_id == self.U1DB_DATA_DOC_ID:
- continue
- doc = self._get_doc(doc_id, check_for_conflicts=True)
- if doc.content is None and not include_deleted:
- continue
- results.append(doc)
- return (generation, results)
-
- def _put_doc(self, doc):
- # prepare couch's Document
- cdoc = CouchDocument()
- cdoc['_id'] = doc.doc_id
- # we have to guarantee that couch's _rev is consistent
- old_cdoc = self._database.get(doc.doc_id)
- if old_cdoc is not None:
- cdoc['_rev'] = old_cdoc['_rev']
- # store u1db's rev
- cdoc['u1db_rev'] = doc.rev
- # save doc in db
- self._database.save(cdoc)
- # store u1db's content as json string
- if not doc.is_tombstone():
- self._database.put_attachment(cdoc, doc.get_json(),
- filename='u1db_json')
- else:
- self._database.delete_attachment(cdoc, 'u1db_json')
-
- def get_sync_target(self):
- return CouchSyncTarget(self)
-
- def create_index(self, index_name, *index_expressions):
- if index_name in self._indexes:
- if self._indexes[index_name]._definition == list(
- index_expressions):
- return
- raise errors.IndexNameTakenError
- index = InMemoryIndex(index_name, list(index_expressions))
- for doc_id in self._database:
- if doc_id == self.U1DB_DATA_DOC_ID:
- continue
- doc = self._get_doc(doc_id)
- if doc.content is not None:
- index.add_json(doc_id, doc.get_json())
- self._indexes[index_name] = index
- # save data in object store
- self._set_u1db_data()
-
- def close(self):
- # TODO: fix this method so the connection is properly closed and
- # test_close (+tearDown, which deletes the db) works without problems.
- self._url = None
- self._full_commit = None
- self._session = None
- #self._server = None
- self._database = None
- return True
-
- def sync(self, url, creds=None, autocreate=True):
- from u1db.sync import Synchronizer
- return Synchronizer(self, CouchSyncTarget(url, creds=creds)).sync(
- autocreate=autocreate)
-
- #-------------------------------------------------------------------------
- # methods from ObjectStore
- #-------------------------------------------------------------------------
-
- def _init_u1db_data(self):
- if self._replica_uid is None:
- self._replica_uid = uuid.uuid4().hex
- doc = self._factory(doc_id=self.U1DB_DATA_DOC_ID)
- doc.content = {'transaction_log': [],
- 'conflicts': b64encode(json.dumps({})),
- 'other_generations': {},
- 'indexes': b64encode(json.dumps({})),
- 'replica_uid': self._replica_uid}
- self._put_doc(doc)
-
- def _get_u1db_data(self):
- # retrieve u1db data from couch db
- cdoc = self._database.get(self.U1DB_DATA_DOC_ID)
- jsonstr = self._database.get_attachment(cdoc, 'u1db_json').getvalue()
- content = json.loads(jsonstr)
- # set u1db database info
- #self._sync_log = content['sync_log']
- self._transaction_log = content['transaction_log']
- self._conflicts = json.loads(b64decode(content['conflicts']))
- self._other_generations = content['other_generations']
- self._indexes = self._load_indexes_from_json(
- b64decode(content['indexes']))
- self._replica_uid = content['replica_uid']
- # save couch _rev
- self._couch_rev = cdoc['_rev']
-
- def _set_u1db_data(self):
- doc = self._factory(doc_id=self.U1DB_DATA_DOC_ID)
- doc.content = {
- 'transaction_log': self._transaction_log,
- # Here, the b64 encode ensures that document content
- # does not cause strange behaviour in couchdb because
- # of encoding.
- 'conflicts': b64encode(json.dumps(self._conflicts)),
- 'other_generations': self._other_generations,
- 'indexes': b64encode(self._dump_indexes_as_json()),
- 'replica_uid': self._replica_uid,
- '_rev': self._couch_rev}
- self._put_doc(doc)
-
- #-------------------------------------------------------------------------
- # Couch specific methods
- #-------------------------------------------------------------------------
-
- def delete_database(self):
- del(self._server[self._dbname])
-
- def _dump_indexes_as_json(self):
- indexes = {}
- for name, idx in self._indexes.iteritems():
- indexes[name] = {}
- for attr in ['name', 'definition', 'values']:
- indexes[name][attr] = getattr(idx, '_' + attr)
- return json.dumps(indexes)
-
- def _load_indexes_from_json(self, indexes):
- dict = {}
- for name, idx_dict in json.loads(indexes).iteritems():
- idx = InMemoryIndex(name, idx_dict['definition'])
- idx._values = idx_dict['values']
- dict[name] = idx
- return dict
-
-
-class CouchSyncTarget(LocalSyncTarget):
-
- def get_sync_info(self, source_replica_uid):
- source_gen, source_trans_id = self._db._get_replica_gen_and_trans_id(
- source_replica_uid)
- my_gen, my_trans_id = self._db._get_generation_info()
- return (
- self._db._replica_uid, my_gen, my_trans_id, source_gen,
- source_trans_id)
-
- def record_sync_info(self, source_replica_uid, source_replica_generation,
- source_replica_transaction_id):
- if self._trace_hook:
- self._trace_hook('record_sync_info')
- self._db._set_replica_gen_and_trans_id(
- source_replica_uid, source_replica_generation,
- source_replica_transaction_id)
-
-
-class CouchServerState(ServerState):
- """
- Interface between the WSGI server and the CouchDB backend.
- """
-
- def __init__(self, couch_url):
- self.couch_url = couch_url
-
- def open_database(self, dbname):
- # TODO: open couch
- from leap.soledad.backends.couch import CouchDatabase
- return CouchDatabase.open_database(self.couch_url + '/' + dbname,
- create=False)
-
- def ensure_database(self, dbname):
- from leap.soledad.backends.couch import CouchDatabase
- db = CouchDatabase.open_database(self.couch_url + '/' + dbname,
- create=True)
- return db, db._replica_uid
-
- def delete_database(self, dbname):
- from leap.soledad.backends.couch import CouchDatabase
- CouchDatabase.delete_database(self.couch_url + '/' + dbname)
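
A brief sketch of driving this Couch backend directly; the database name is illustrative and, as the Soledad README above notes, a CouchDB instance on localhost:5984 is assumed:

    from leap.soledad.backends.couch import CouchDatabase

    db = CouchDatabase.open_database('http://localhost:5984/testdb', create=True)
    db.create_doc({'number': 1})
    generation, docs = db.get_all_docs()
    print generation, [d.content for d in docs]
    db.delete_database()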
diff --git a/src/leap/soledad/backends/leap_backend.py b/src/leap/soledad/backends/leap_backend.py
deleted file mode 100644
index c3c52ee6..00000000
--- a/src/leap/soledad/backends/leap_backend.py
+++ /dev/null
@@ -1,193 +0,0 @@
-try:
- import simplejson as json
-except ImportError:
- import json # noqa
-
-from u1db import Document
-from u1db.remote import utils
-from u1db.remote.http_target import HTTPSyncTarget
-from u1db.remote.http_database import HTTPDatabase
-from u1db.errors import BrokenSyncStream
-
-import uuid
-
-
-class NoDefaultKey(Exception):
- pass
-
-
-class NoSoledadInstance(Exception):
- pass
-
-
-class DocumentEncryptionFailed(Exception):
- pass
-
-
-class LeapDocument(Document):
- """
- Encryptable and syncable document.
-
- LEAP Documents are standard u1db documents with the capability of returning an
- encrypted version of the document's json string, as well as setting document
- content based on an encrypted version of a json string.
- """
-
- def __init__(self, doc_id=None, rev=None, json='{}', has_conflicts=False,
- encrypted_json=None, soledad=None, syncable=True):
- super(LeapDocument, self).__init__(doc_id, rev, json, has_conflicts)
- self._soledad = soledad
- self._syncable = syncable
- if encrypted_json:
- self.set_encrypted_json(encrypted_json)
-
- def get_encrypted_json(self):
- """
- Return the document's json serialization encrypted with the symmetric secret.
- """
- if not self._soledad:
- raise NoSoledadInstance()
- ciphertext = self._soledad.encrypt_symmetric(self.doc_id,
- self.get_json())
- return json.dumps({'_encrypted_json': ciphertext})
-
- def set_encrypted_json(self, encrypted_json):
- """
- Set document's content based on encrypted version of json string.
- """
- if not self._soledad:
- raise NoSoledadInstance()
- ciphertext = json.loads(encrypted_json)['_encrypted_json']
- plaintext = self._soledad.decrypt_symmetric(self.doc_id, ciphertext)
- return self.set_json(plaintext)
-
- def _get_syncable(self):
- return self._syncable
-
- def _set_syncable(self, syncable=True):
- self._syncable = syncable
-
- syncable = property(
- _get_syncable,
- _set_syncable,
- doc="Determine if document should be synced with server."
- )
-
- # Returning the revision as string solves the following exception in
- # Twisted web:
- # exceptions.TypeError: Can only pass-through bytes on Python 2
- def _get_rev(self):
- if self._rev is None:
- return None
- return str(self._rev)
-
- def _set_rev(self, rev):
- self._rev = rev
-
- rev = property(
- _get_rev,
- _set_rev,
- doc="Wrapper to ensure `doc.rev` is always returned as bytes.")
-
-
-class LeapSyncTarget(HTTPSyncTarget):
- """
- A SyncTarget that encrypts data before sending and decrypts data after
- receiving.
- """
-
- def __init__(self, url, creds=None, soledad=None):
- super(LeapSyncTarget, self).__init__(url, creds)
- self._soledad = soledad
-
- def _parse_sync_stream(self, data, return_doc_cb, ensure_callback=None):
- """
- Does the same as parent's method but ensures incoming content will be
- decrypted.
- """
- parts = data.splitlines() # one at a time
- if not parts or parts[0] != '[':
- raise BrokenSyncStream
- data = parts[1:-1]
- comma = False
- if data:
- line, comma = utils.check_and_strip_comma(data[0])
- res = json.loads(line)
- if ensure_callback and 'replica_uid' in res:
- ensure_callback(res['replica_uid'])
- for entry in data[1:]:
- if not comma: # missing in between comma
- raise BrokenSyncStream
- line, comma = utils.check_and_strip_comma(entry)
- entry = json.loads(line)
- # decrypt after receiving from server.
- doc = LeapDocument(entry['id'], entry['rev'],
- encrypted_json=entry['content'],
- soledad=self._soledad)
- return_doc_cb(doc, entry['gen'], entry['trans_id'])
- if parts[-1] != ']':
- try:
- partdic = json.loads(parts[-1])
- except ValueError:
- pass
- else:
- if isinstance(partdic, dict):
- self._error(partdic)
- raise BrokenSyncStream
- if not data or comma: # no entries or bad extra comma
- raise BrokenSyncStream
- return res
-
- def sync_exchange(self, docs_by_generations, source_replica_uid,
- last_known_generation, last_known_trans_id,
- return_doc_cb, ensure_callback=None):
- """
- Does the same as parent's method but encrypts content before syncing.
- """
- self._ensure_connection()
- if self._trace_hook: # for tests
- self._trace_hook('sync_exchange')
- url = '%s/sync-from/%s' % (self._url.path, source_replica_uid)
- self._conn.putrequest('POST', url)
- self._conn.putheader('content-type', 'application/x-u1db-sync-stream')
- for header_name, header_value in self._sign_request('POST', url, {}):
- self._conn.putheader(header_name, header_value)
- entries = ['[']
- size = 1
-
- def prepare(**dic):
- entry = comma + '\r\n' + json.dumps(dic)
- entries.append(entry)
- return len(entry)
-
- comma = ''
- size += prepare(
- last_known_generation=last_known_generation,
- last_known_trans_id=last_known_trans_id,
- ensure=ensure_callback is not None)
- comma = ','
- for doc, gen, trans_id in docs_by_generations:
- if doc.syncable:
- # encrypt and verify before sending to server.
- doc_content = doc.get_encrypted_json()
- if doc_content == doc.get_json():
- raise DocumentEncryptionFailed
- enc_doc = LeapDocument(doc.doc_id, doc.rev,
- encrypted_json=doc_content,
- soledad=self._soledad)
- if doc.get_json() != enc_doc.get_json():
- raise DocumentEncryptionFailed
- size += prepare(id=doc.doc_id, rev=doc.rev,
- content=doc_content,
- gen=gen, trans_id=trans_id)
- entries.append('\r\n]')
- size += len(entries[-1])
- self._conn.putheader('content-length', str(size))
- self._conn.endheaders()
- for entry in entries:
- self._conn.send(entry)
- entries = None
- data, _ = self._response()
- res = self._parse_sync_stream(data, return_doc_cb, ensure_callback)
- data = None
- return res['new_generation'], res['new_transaction_id']
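
A minimal sketch of the encrypted round trip LeapDocument provides, assuming an initialized Soledad instance; the user address, doc id and content are placeholders:

    from leap.soledad import Soledad
    from leap.soledad.backends.leap_backend import LeapDocument

    s = Soledad('user@example.org')
    doc = LeapDocument(doc_id='doc-1', json='{"secret": "data"}', soledad=s)

    # ciphertext wrapped as {'_encrypted_json': ...}, what LeapSyncTarget sends
    blob = doc.get_encrypted_json()

    # the receiving side rebuilds the plaintext content from the same secret
    copy = LeapDocument(doc_id='doc-1', encrypted_json=blob, soledad=s)
    print copy.get_json()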
diff --git a/src/leap/soledad/backends/objectstore.py b/src/leap/soledad/backends/objectstore.py
deleted file mode 100644
index 199107af..00000000
--- a/src/leap/soledad/backends/objectstore.py
+++ /dev/null
@@ -1,114 +0,0 @@
-from u1db.backends.inmemory import InMemoryDatabase
-from u1db import errors
-
-
-class ObjectStore(InMemoryDatabase):
- """
- A backend for storing u1db data in an object store.
- """
-
- @classmethod
- def open_database(cls, url, create, document_factory=None):
- raise NotImplementedError(cls.open_database)
-
- def __init__(self, replica_uid=None, document_factory=None):
- super(ObjectStore, self).__init__(replica_uid,
- document_factory=document_factory)
- # sync data in memory with data in object store
- if not self._get_doc(self.U1DB_DATA_DOC_ID):
- self._init_u1db_data()
- self._get_u1db_data()
-
- #-------------------------------------------------------------------------
- # methods from Database
- #-------------------------------------------------------------------------
-
- def _set_replica_uid(self, replica_uid):
- super(ObjectStore, self)._set_replica_uid(replica_uid)
- self._set_u1db_data()
-
- def _put_doc(self, doc):
- raise NotImplementedError(self._put_doc)
-
- def _get_doc(self, doc):
- raise NotImplementedError(self._get_doc)
-
- def get_all_docs(self, include_deleted=False):
- raise NotImplementedError(self.get_all_docs)
-
- def delete_doc(self, doc):
- old_doc = self._get_doc(doc.doc_id, check_for_conflicts=True)
- if old_doc is None:
- raise errors.DocumentDoesNotExist
- if old_doc.rev != doc.rev:
- raise errors.RevisionConflict()
- if old_doc.is_tombstone():
- raise errors.DocumentAlreadyDeleted
- if old_doc.has_conflicts:
- raise errors.ConflictedDoc()
- new_rev = self._allocate_doc_rev(doc.rev)
- doc.rev = new_rev
- doc.make_tombstone()
- self._put_and_update_indexes(old_doc, doc)
- return new_rev
-
- # index-related methods
-
- def create_index(self, index_name, *index_expressions):
- raise NotImplementedError(self.create_index)
-
- def delete_index(self, index_name):
- super(ObjectStore, self).delete_index(index_name)
- self._set_u1db_data()
-
- def _replace_conflicts(self, doc, conflicts):
- super(ObjectStore, self)._replace_conflicts(doc, conflicts)
- self._set_u1db_data()
-
- def _do_set_replica_gen_and_trans_id(self, other_replica_uid,
- other_generation,
- other_transaction_id):
- super(ObjectStore, self)._do_set_replica_gen_and_trans_id(
- other_replica_uid,
- other_generation,
- other_transaction_id)
- self._set_u1db_data()
-
- #-------------------------------------------------------------------------
- # implemented methods from CommonBackend
- #-------------------------------------------------------------------------
-
- def _put_and_update_indexes(self, old_doc, doc):
- for index in self._indexes.itervalues():
- if old_doc is not None and not old_doc.is_tombstone():
- index.remove_json(old_doc.doc_id, old_doc.get_json())
- if not doc.is_tombstone():
- index.add_json(doc.doc_id, doc.get_json())
- trans_id = self._allocate_transaction_id()
- self._put_doc(doc)
- self._transaction_log.append((doc.doc_id, trans_id))
- self._set_u1db_data()
-
- #-------------------------------------------------------------------------
- # methods specific for object stores
- #-------------------------------------------------------------------------
-
- U1DB_DATA_DOC_ID = 'u1db_data'
-
- def _get_u1db_data(self):
- """
- Fetch u1db configuration data from backend storage.
- """
- raise NotImplementedError(self._get_u1db_data)
-
- def _set_u1db_data(self):
- """
- Save u1db configuration data on backend storage.
- """
- raise NotImplementedError(self._set_u1db_data)
-
- def _init_u1db_data(self):
- """
- Initialize u1db configuration data on backend storage.
- """
- raise NotImplementedError(self._init_u1db_data)
diff --git a/src/leap/soledad/backends/sqlcipher.py b/src/leap/soledad/backends/sqlcipher.py
deleted file mode 100644
index c902b466..00000000
--- a/src/leap/soledad/backends/sqlcipher.py
+++ /dev/null
@@ -1,176 +0,0 @@
-# Copyright 2011 Canonical Ltd.
-#
-# This file is part of u1db.
-#
-# u1db is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License version 3
-# as published by the Free Software Foundation.
-#
-# u1db is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with u1db. If not, see <http://www.gnu.org/licenses/>.
-
-"""A U1DB implementation that uses SQLCipher as its persistence layer."""
-
-import os
-from pysqlcipher import dbapi2
-import time
-
-from u1db.backends.sqlite_backend import (
- SQLiteDatabase,
- SQLitePartialExpandDatabase,
-)
-from u1db import (
- errors,
-)
-
-from leap.soledad.backends.leap_backend import LeapDocument
-
-
-def open(path, password, create=True, document_factory=None, soledad=None):
- """Open a database at the given location.
-
- Will raise u1db.errors.DatabaseDoesNotExist if create=False and the
- database does not already exist.
-
- :param path: The filesystem path for the database to open.
- :param password: The passphrase used as the SQLCipher key for the database.
- :param create: True/False, should the database be created if it doesn't
- already exist?
- :param document_factory: A function that will be called with the same
- parameters as Document.__init__.
- :param soledad: An optional Soledad instance, passed on to the document
- factory for symmetric encryption and decryption.
- :return: An instance of Database.
- """
- return SQLCipherDatabase.open_database(
- path, password, create=create, document_factory=document_factory,
- soledad=soledad)
-
-
-class DatabaseIsNotEncrypted(Exception):
- """
- Exception raised when trying to open non-encrypted databases.
- """
- pass
-
-
-class SQLCipherDatabase(SQLitePartialExpandDatabase):
- """A U1DB implementation that uses SQLCipher as its persistence layer."""
-
- _index_storage_value = 'expand referenced encrypted'
-
- @classmethod
- def set_pragma_key(cls, db_handle, key):
- db_handle.cursor().execute("PRAGMA key = '%s'" % key)
-
- def __init__(self, sqlite_file, password, document_factory=None,
- soledad=None):
- """Create a new sqlcipher file."""
- self._check_if_db_is_encrypted(sqlite_file)
- self._db_handle = dbapi2.connect(sqlite_file)
- SQLCipherDatabase.set_pragma_key(self._db_handle, password)
- self._real_replica_uid = None
- self._ensure_schema()
- self._soledad = soledad
-
- def factory(doc_id=None, rev=None, json='{}', has_conflicts=False,
- encrypted_json=None, syncable=True):
- return LeapDocument(doc_id=doc_id, rev=rev, json=json,
- has_conflicts=has_conflicts,
- encrypted_json=encrypted_json,
- syncable=syncable, soledad=self._soledad)
- self.set_document_factory(factory)
-
- def _check_if_db_is_encrypted(self, sqlite_file):
- if not os.path.exists(sqlite_file):
- return
- else:
- try:
- # trying to open an encrypted database with the regular u1db
- # backend should raise a DatabaseError exception.
- SQLitePartialExpandDatabase(sqlite_file)
- raise DatabaseIsNotEncrypted()
- except dbapi2.DatabaseError:
- pass
-
- @classmethod
- def _open_database(cls, sqlite_file, password, document_factory=None,
- soledad=None):
- if not os.path.isfile(sqlite_file):
- raise errors.DatabaseDoesNotExist()
- tries = 2
- while True:
- # Note: There seems to be a bug in sqlite 3.5.9 (with python2.6)
- # where without re-opening the database on Windows, it
- # doesn't see the transaction that was just committed
- db_handle = dbapi2.connect(sqlite_file)
- SQLCipherDatabase.set_pragma_key(db_handle, password)
- c = db_handle.cursor()
- v, err = cls._which_index_storage(c)
- db_handle.close()
- if v is not None:
- break
- # possibly another process is initializing it, wait for it to be
- # done
- if tries == 0:
- raise err # go for the richest error?
- tries -= 1
- time.sleep(cls.WAIT_FOR_PARALLEL_INIT_HALF_INTERVAL)
- return SQLCipherDatabase._sqlite_registry[v](
- sqlite_file, password, document_factory=document_factory,
- soledad=soledad)
-
- @classmethod
- def open_database(cls, sqlite_file, password, create, backend_cls=None,
- document_factory=None, soledad=None):
- try:
- return cls._open_database(sqlite_file, password,
- document_factory=document_factory,
- soledad=soledad)
- except errors.DatabaseDoesNotExist:
- if not create:
- raise
- if backend_cls is None:
- # default backend is SQLCipherDatabase
- backend_cls = SQLCipherDatabase
- return backend_cls(sqlite_file, password,
- document_factory=document_factory,
- soledad=soledad)
-
- def sync(self, url, creds=None, autocreate=True):
- """
- Synchronize encrypted documents with remote replica exposed at url.
- """
- from u1db.sync import Synchronizer
- from leap.soledad.backends.leap_backend import LeapSyncTarget
- return Synchronizer(
- self,
- LeapSyncTarget(url,
- creds=creds,
- soledad=self._soledad)).sync(autocreate=autocreate)
-
- def _extra_schema_init(self, c):
- c.execute(
- 'ALTER TABLE document '
- 'ADD COLUMN syncable BOOL NOT NULL DEFAULT TRUE')
-
- def _put_and_update_indexes(self, old_doc, doc):
- super(SQLCipherDatabase, self)._put_and_update_indexes(old_doc, doc)
- c = self._db_handle.cursor()
- c.execute('UPDATE document SET syncable=? WHERE doc_id=?',
- (doc.syncable, doc.doc_id))
-
- def _get_doc(self, doc_id, check_for_conflicts=False):
- doc = super(SQLCipherDatabase, self)._get_doc(doc_id,
- check_for_conflicts)
- if doc:
- c = self._db_handle.cursor()
- c.execute('SELECT syncable FROM document WHERE doc_id=?',
- (doc.doc_id,))
- doc.syncable = bool(c.fetchone()[0])
- return doc
-
-
-SQLiteDatabase.register_implementation(SQLCipherDatabase)
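
For reference, a hedged sketch of the open() helper above; the path and passphrase are placeholders and pysqlcipher must be importable:

    from leap.soledad.backends import sqlcipher

    db = sqlcipher.open('/tmp/encrypted.u1db', 'a secret passphrase', create=True)
    doc = db.create_doc({'payload': 'only readable with the right passphrase'})
    print db.get_doc(doc.doc_id).content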
diff --git a/src/leap/soledad/server.py b/src/leap/soledad/server.py
deleted file mode 100644
index 4fc97be5..00000000
--- a/src/leap/soledad/server.py
+++ /dev/null
@@ -1,20 +0,0 @@
-"""
-A u1db server that stores data using couchdb.
-
-This should be run with:
- twistd -n web --wsgi=leap.soledad.server.application
-"""
-
-from twisted.web.wsgi import WSGIResource
-from twisted.internet import reactor
-from u1db.remote import http_app
-from leap.soledad.backends.couch import CouchServerState
-
-couch_url = 'http://localhost:5984'
-state = CouchServerState(couch_url)
-# TODO: change working dir to something meaningful
-state.set_workingdir('/tmp')
-# TODO: write a LeapHTTPApp that will use Couch as backend instead of SQLite
-application = http_app.HTTPApp(state)
-
-resource = WSGIResource(reactor, reactor.getThreadPool(), application)
diff --git a/src/leap/soledad/tests/__init__.py b/src/leap/soledad/tests/__init__.py
deleted file mode 100644
index 6135e648..00000000
--- a/src/leap/soledad/tests/__init__.py
+++ /dev/null
@@ -1,199 +0,0 @@
-import u1db
-from leap.soledad import Soledad
-from leap.soledad.backends.leap_backend import LeapDocument
-from leap.testing.basetest import BaseLeapTest
-
-
-#-----------------------------------------------------------------------------
-# Some tests inherit from BaseSoledadTest in order to have a working Soledad
-# instance in each test.
-#-----------------------------------------------------------------------------
-
-class BaseSoledadTest(BaseLeapTest):
-
- def setUp(self):
- # config info
- self.gnupg_home = "%s/gnupg" % self.tempdir
- self.db1_file = "%s/db1.u1db" % self.tempdir
- self.db2_file = "%s/db2.u1db" % self.tempdir
- self.email = 'leap@leap.se'
- # open test dbs
- self._db1 = u1db.open(self.db1_file, create=True,
- document_factory=LeapDocument)
- self._db2 = u1db.open(self.db2_file, create=True,
- document_factory=LeapDocument)
- # open a soledad instance
- self._soledad = Soledad(self.email, gpghome=self.gnupg_home,
- initialize=False)
- self._soledad._gpg.import_keys(PUBLIC_KEY)
- self._soledad._gpg.import_keys(PRIVATE_KEY)
- self._soledad._initialize()
-
- def tearDown(self):
- self._db1.close()
- self._db2.close()
- self._soledad.close()
-
-
-# Key material for testing
-KEY_FINGERPRINT = "E36E738D69173C13D709E44F2F455E2824D18DDF"
-PUBLIC_KEY = """
------BEGIN PGP PUBLIC KEY BLOCK-----
-Version: GnuPG v1.4.10 (GNU/Linux)
-
-mQINBFC9+dkBEADNRfwV23TWEoGc/x0wWH1P7PlXt8MnC2Z1kKaKKmfnglVrpOiz
-iLWoiU58sfZ0L5vHkzXHXCBf6Eiy/EtUIvdiWAn+yASJ1mk5jZTBKO/WMAHD8wTO
-zpMsFmWyg3xc4DkmFa9KQ5EVU0o/nqPeyQxNMQN7px5pPwrJtJFmPxnxm+aDkPYx
-irDmz/4DeDNqXliazGJKw7efqBdlwTHkl9Akw2gwy178pmsKwHHEMOBOFFvX61AT
-huKqHYmlCGSliwbrJppTG7jc1/ls3itrK+CWTg4txREkSpEVmfcASvw/ZqLbjgfs
-d/INMwXnR9U81O8+7LT6yw/ca4ppcFoJD7/XJbkRiML6+bJ4Dakiy6i727BzV17g
-wI1zqNvm5rAhtALKfACha6YO43aJzairO4II1wxVHvRDHZn2IuKDDephQ3Ii7/vb
-hUOf6XCSmchkAcpKXUOvbxm1yfB1LRa64mMc2RcZxf4mW7KQkulBsdV5QG2276lv
-U2UUy2IutXcGP5nXC+f6sJJGJeEToKJ57yiO/VWJFjKN8SvP+7AYsQSqINUuEf6H
-T5gCPCraGMkTUTPXrREvu7NOohU78q6zZNaL3GW8ai7eSeANSuQ8Vzffx7Wd8Y7i
-Pw9sYj0SMFs1UgjbuL6pO5ueHh+qyumbtAq2K0Bci0kqOcU4E9fNtdiovQARAQAB
-tBxMZWFwIFRlc3QgS2V5IDxsZWFwQGxlYXAuc2U+iQI3BBMBCAAhBQJQvfnZAhsD
-BQsJCAcDBRUKCQgLBRYCAwEAAh4BAheAAAoJEC9FXigk0Y3fT7EQAKH3IuRniOpb
-T/DDIgwwjz3oxB/W0DDMyPXowlhSOuM0rgGfntBpBb3boezEXwL86NPQxNGGruF5
-hkmecSiuPSvOmQlqlS95NGQp6hNG0YaKColh+Q5NTspFXCAkFch9oqUje0LdxfSP
-QfV9UpeEvGyPmk1I9EJV/YDmZ4+Djge1d7qhVZInz4Rx1NrSyF/Tc2EC0VpjQFsU
-Y9Kb2YBBR7ivG6DBc8ty0jJXi7B4WjkFcUEJviQpMF2dCLdonCehYs1PqsN1N7j+
-eFjQd+hqVMJgYuSGKjvuAEfClM6MQw7+FmFwMyLgK/Ew/DttHEDCri77SPSkOGSI
-txCzhTg6798f6mJr7WcXmHX1w1Vcib5FfZ8vTDFVhz/XgAgArdhPo9V6/1dgSSiB
-KPQ/spsco6u5imdOhckERE0lnAYvVT6KE81TKuhF/b23u7x+Wdew6kK0EQhYA7wy
-7LmlaNXc7rMBQJ9Z60CJ4JDtatBWZ0kNrt2VfdDHVdqBTOpl0CraNUjWE5YMDasr
-K2dF5IX8D3uuYtpZnxqg0KzyLg0tzL0tvOL1C2iudgZUISZNPKbS0z0v+afuAAnx
-2pTC3uezbh2Jt8SWTLhll4i0P4Ps5kZ6HQUO56O+/Z1cWovX+mQekYFmERySDR9n
-3k1uAwLilJmRmepGmvYbB8HloV8HqwgguQINBFC9+dkBEAC0I/xn1uborMgDvBtf
-H0sEhwnXBC849/32zic6udB6/3Efk9nzbSpL3FSOuXITZsZgCHPkKarnoQ2ztMcS
-sh1ke1C5gQGms75UVmM/nS+2YI4vY8OX/GC/on2vUyncqdH+bR6xH5hx4NbWpfTs
-iQHmz5C6zzS/kuabGdZyKRaZHt23WQ7JX/4zpjqbC99DjHcP9BSk7tJ8wI4bkMYD
-uFVQdT9O6HwyKGYwUU4sAQRAj7XCTGvVbT0dpgJwH4RmrEtJoHAx4Whg8mJ710E0
-GCmzf2jqkNuOw76ivgk27Kge+Hw00jmJjQhHY0yVbiaoJwcRrPKzaSjEVNgrpgP3
-lXPRGQArgESsIOTeVVHQ8fhK2YtTeCY9rIiO+L0OX2xo9HK7hfHZZWL6rqymXdyS
-fhzh/f6IPyHFWnvj7Brl7DR8heMikygcJqv+ed2yx7iLyCUJ10g12I48+aEj1aLe
-dP7lna32iY8/Z0SHQLNH6PXO9SlPcq2aFUgKqE75A/0FMk7CunzU1OWr2ZtTLNO1
-WT/13LfOhhuEq9jTyTosn0WxBjJKq18lnhzCXlaw6EAtbA7CUwsD3CTPR56aAXFK
-3I7KXOVAqggrvMe5Tpdg5drfYpI8hZovL5aAgb+7Y5ta10TcJdUhS5K3kFAWe/td
-U0cmWUMDP1UMSQ5Jg6JIQVWhSwARAQABiQIfBBgBCAAJBQJQvfnZAhsMAAoJEC9F
-Xigk0Y3fRwsP/i0ElYCyxeLpWJTwo1iCLkMKz2yX1lFVa9nT1BVTPOQwr/IAc5OX
-NdtbJ14fUsKL5pWgW8OmrXtwZm1y4euI1RPWWubG01ouzwnGzv26UcuHeqC5orZj
-cOnKtL40y8VGMm8LoicVkRJH8blPORCnaLjdOtmA3rx/v2EXrJpSa3AhOy0ZSRXk
-ZSrK68AVNwamHRoBSYyo0AtaXnkPX4+tmO8X8BPfj125IljubvwZPIW9VWR9UqCE
-VPfDR1XKegVb6VStIywF7kmrknM1C5qUY28rdZYWgKorw01hBGV4jTW0cqde3N51
-XT1jnIAa+NoXUM9uQoGYMiwrL7vNsLlyyiW5ayDyV92H/rIuiqhFgbJsHTlsm7I8
-oGheR784BagAA1NIKD1qEO9T6Kz9lzlDaeWS5AUKeXrb7ZJLI1TTCIZx5/DxjLqM
-Tt/RFBpVo9geZQrvLUqLAMwdaUvDXC2c6DaCPXTh65oCZj/hqzlJHH+RoTWWzKI+
-BjXxgUWF9EmZUBrg68DSmI+9wuDFsjZ51BcqvJwxyfxtTaWhdoYqH/UQS+D1FP3/
-diZHHlzwVwPICzM9ooNTgbrcDzyxRkIVqsVwBq7EtzcvgYUyX53yG25Giy6YQaQ2
-ZtQ/VymwFL3XdUWV6B/hU4PVAFvO3qlOtdJ6TpE+nEWgcWjCv5g7RjXX
-=MuOY
------END PGP PUBLIC KEY BLOCK-----
-"""
-PRIVATE_KEY = """
------BEGIN PGP PRIVATE KEY BLOCK-----
-Version: GnuPG v1.4.10 (GNU/Linux)
-
-lQcYBFC9+dkBEADNRfwV23TWEoGc/x0wWH1P7PlXt8MnC2Z1kKaKKmfnglVrpOiz
-iLWoiU58sfZ0L5vHkzXHXCBf6Eiy/EtUIvdiWAn+yASJ1mk5jZTBKO/WMAHD8wTO
-zpMsFmWyg3xc4DkmFa9KQ5EVU0o/nqPeyQxNMQN7px5pPwrJtJFmPxnxm+aDkPYx
-irDmz/4DeDNqXliazGJKw7efqBdlwTHkl9Akw2gwy178pmsKwHHEMOBOFFvX61AT
-huKqHYmlCGSliwbrJppTG7jc1/ls3itrK+CWTg4txREkSpEVmfcASvw/ZqLbjgfs
-d/INMwXnR9U81O8+7LT6yw/ca4ppcFoJD7/XJbkRiML6+bJ4Dakiy6i727BzV17g
-wI1zqNvm5rAhtALKfACha6YO43aJzairO4II1wxVHvRDHZn2IuKDDephQ3Ii7/vb
-hUOf6XCSmchkAcpKXUOvbxm1yfB1LRa64mMc2RcZxf4mW7KQkulBsdV5QG2276lv
-U2UUy2IutXcGP5nXC+f6sJJGJeEToKJ57yiO/VWJFjKN8SvP+7AYsQSqINUuEf6H
-T5gCPCraGMkTUTPXrREvu7NOohU78q6zZNaL3GW8ai7eSeANSuQ8Vzffx7Wd8Y7i
-Pw9sYj0SMFs1UgjbuL6pO5ueHh+qyumbtAq2K0Bci0kqOcU4E9fNtdiovQARAQAB
-AA/+JHtlL39G1wsH9R6UEfUQJGXR9MiIiwZoKcnRB2o8+DS+OLjg0JOh8XehtuCs
-E/8oGQKtQqa5bEIstX7IZoYmYFiUQi9LOzIblmp2vxOm+HKkxa4JszWci2/ZmC3t
-KtaA4adl9XVnshoQ7pijuCMUKB3naBEOAxd8s9d/JeReGIYkJErdrnVfNk5N71Ds
-FmH5Ll3XtEDvgBUQP3nkA6QFjpsaB94FHjL3gDwum/cxzj6pCglcvHOzEhfY0Ddb
-J967FozQTaf2JW3O+w3LOqtcKWpq87B7+O61tVidQPSSuzPjCtFF0D2LC9R/Hpky
-KTMQ6CaKja4MPhjwywd4QPcHGYSqjMpflvJqi+kYIt8psUK/YswWjnr3r4fbuqVY
-VhtiHvnBHQjz135lUqWvEz4hM3Xpnxydx7aRlv5NlevK8+YIO5oFbWbGNTWsPZI5
-jpoFBpSsnR1Q5tnvtNHauvoWV+XN2qAOBTG+/nEbDYH6Ak3aaE9jrpTdYh0CotYF
-q7csANsDy3JvkAzeU6WnYpsHHaAjqOGyiZGsLej1UcXPFMosE/aUo4WQhiS8Zx2c
-zOVKOi/X5vQ2GdNT9Qolz8AriwzsvFR+bxPzyd8V6ALwDsoXvwEYinYBKK8j0OPv
-OOihSR6HVsuP9NUZNU9ewiGzte/+/r6pNXHvR7wTQ8EWLcEIAN6Zyrb0bHZTIlxt
-VWur/Ht2mIZrBaO50qmM5RD3T5oXzWXi/pjLrIpBMfeZR9DWfwQwjYzwqi7pxtYx
-nJvbMuY505rfnMoYxb4J+cpRXV8MS7Dr1vjjLVUC9KiwSbM3gg6emfd2yuA93ihv
-Pe3mffzLIiQa4mRE3wtGcioC43nWuV2K2e1KjxeFg07JhrezA/1Cak505ab/tmvP
-4YmjR5c44+yL/YcQ3HdFgs4mV+nVbptRXvRcPpolJsgxPccGNdvHhsoR4gwXMS3F
-RRPD2z6x8xeN73Q4KH3bm01swQdwFBZbWVfmUGLxvN7leCdfs9+iFJyqHiCIB6Iv
-mQfp8F0IAOwSo8JhWN+V1dwML4EkIrM8wUb4yecNLkyR6TpPH/qXx4PxVMC+vy6x
-sCtjeHIwKE+9vqnlhd5zOYh7qYXEJtYwdeDDmDbL8oks1LFfd+FyAuZXY33DLwn0
-cRYsr2OEZmaajqUB3NVmj3H4uJBN9+paFHyFSXrH68K1Fk2o3n+RSf2EiX+eICwI
-L6rqoF5sSVUghBWdNegV7qfy4anwTQwrIMGjgU5S6PKW0Dr/3iO5z3qQpGPAj5OW
-ATqPWkDICLbObPxD5cJlyyNE2wCA9VVc6/1d6w4EVwSq9h3/WTpATEreXXxTGptd
-LNiTA1nmakBYNO2Iyo3djhaqBdWjk+EIAKtVEnJH9FAVwWOvaj1RoZMA5DnDMo7e
-SnhrCXl8AL7Z1WInEaybasTJXn1uQ8xY52Ua4b8cbuEKRKzw/70NesFRoMLYoHTO
-dyeszvhoDHberpGRTciVmpMu7Hyi33rM31K9epA4ib6QbbCHnxkWOZB+Bhgj1hJ8
-xb4RBYWiWpAYcg0+DAC3w9gfxQhtUlZPIbmbrBmrVkO2GVGUj8kH6k4UV6kUHEGY
-HQWQR0HcbKcXW81ZXCCD0l7ROuEWQtTe5Jw7dJ4/QFuqZnPutXVRNOZqpl6eRShw
-7X2/a29VXBpmHA95a88rSQsL+qm7Fb3prqRmuMCtrUZgFz7HLSTuUMR867QcTGVh
-cCBUZXN0IEtleSA8bGVhcEBsZWFwLnNlPokCNwQTAQgAIQUCUL352QIbAwULCQgH
-AwUVCgkICwUWAgMBAAIeAQIXgAAKCRAvRV4oJNGN30+xEACh9yLkZ4jqW0/wwyIM
-MI896MQf1tAwzMj16MJYUjrjNK4Bn57QaQW926HsxF8C/OjT0MTRhq7heYZJnnEo
-rj0rzpkJapUveTRkKeoTRtGGigqJYfkOTU7KRVwgJBXIfaKlI3tC3cX0j0H1fVKX
-hLxsj5pNSPRCVf2A5mePg44HtXe6oVWSJ8+EcdTa0shf03NhAtFaY0BbFGPSm9mA
-QUe4rxugwXPLctIyV4uweFo5BXFBCb4kKTBdnQi3aJwnoWLNT6rDdTe4/nhY0Hfo
-alTCYGLkhio77gBHwpTOjEMO/hZhcDMi4CvxMPw7bRxAwq4u+0j0pDhkiLcQs4U4
-Ou/fH+pia+1nF5h19cNVXIm+RX2fL0wxVYc/14AIAK3YT6PVev9XYEkogSj0P7Kb
-HKOruYpnToXJBERNJZwGL1U+ihPNUyroRf29t7u8flnXsOpCtBEIWAO8Muy5pWjV
-3O6zAUCfWetAieCQ7WrQVmdJDa7dlX3Qx1XagUzqZdAq2jVI1hOWDA2rKytnReSF
-/A97rmLaWZ8aoNCs8i4NLcy9Lbzi9QtornYGVCEmTTym0tM9L/mn7gAJ8dqUwt7n
-s24dibfElky4ZZeItD+D7OZGeh0FDuejvv2dXFqL1/pkHpGBZhEckg0fZ95NbgMC
-4pSZkZnqRpr2GwfB5aFfB6sIIJ0HGARQvfnZARAAtCP8Z9bm6KzIA7wbXx9LBIcJ
-1wQvOPf99s4nOrnQev9xH5PZ820qS9xUjrlyE2bGYAhz5Cmq56ENs7THErIdZHtQ
-uYEBprO+VFZjP50vtmCOL2PDl/xgv6J9r1Mp3KnR/m0esR+YceDW1qX07IkB5s+Q
-us80v5LmmxnWcikWmR7dt1kOyV/+M6Y6mwvfQ4x3D/QUpO7SfMCOG5DGA7hVUHU/
-Tuh8MihmMFFOLAEEQI+1wkxr1W09HaYCcB+EZqxLSaBwMeFoYPJie9dBNBgps39o
-6pDbjsO+or4JNuyoHvh8NNI5iY0IR2NMlW4mqCcHEazys2koxFTYK6YD95Vz0RkA
-K4BErCDk3lVR0PH4StmLU3gmPayIjvi9Dl9saPRyu4Xx2WVi+q6spl3ckn4c4f3+
-iD8hxVp74+wa5ew0fIXjIpMoHCar/nndsse4i8glCddINdiOPPmhI9Wi3nT+5Z2t
-9omPP2dEh0CzR+j1zvUpT3KtmhVICqhO+QP9BTJOwrp81NTlq9mbUyzTtVk/9dy3
-zoYbhKvY08k6LJ9FsQYySqtfJZ4cwl5WsOhALWwOwlMLA9wkz0eemgFxStyOylzl
-QKoIK7zHuU6XYOXa32KSPIWaLy+WgIG/u2ObWtdE3CXVIUuSt5BQFnv7XVNHJllD
-Az9VDEkOSYOiSEFVoUsAEQEAAQAP/1AagnZQZyzHDEgw4QELAspYHCWLXE5aZInX
-wTUJhK31IgIXNn9bJ0hFiSpQR2xeMs9oYtRuPOu0P8oOFMn4/z374fkjZy8QVY3e
-PlL+3EUeqYtkMwlGNmVw5a/NbNuNfm5Darb7pEfbYd1gPcni4MAYw7R2SG/57GbC
-9gucvspHIfOSfBNLBthDzmK8xEKe1yD2eimfc2T7IRYb6hmkYfeds5GsqvGI6mwI
-85h4uUHWRc5JOlhVM6yX8hSWx0L60Z3DZLChmc8maWnFXd7C8eQ6P1azJJbW71Ih
-7CoK0XW4LE82vlQurSRFgTwfl7wFYszW2bOzCuhHDDtYnwH86Nsu0DC78ZVRnvxn
-E8Ke/AJgrdhIOo4UAyR+aZD2+2mKd7/waOUTUrUtTzc7i8N3YXGi/EIaNReBXaq+
-ZNOp24BlFzRp+FCF/pptDW9HjPdiV09x0DgICmeZS4Gq/4vFFIahWctg52NGebT0
-Idxngjj+xDtLaZlLQoOz0n5ByjO/Wi0ANmMv1sMKCHhGvdaSws2/PbMR2r4caj8m
-KXpIgdinM/wUzHJ5pZyF2U/qejsRj8Kw8KH/tfX4JCLhiaP/mgeTuWGDHeZQERAT
-xPmRFHaLP9/ZhvGNh6okIYtrKjWTLGoXvKLHcrKNisBLSq+P2WeFrlme1vjvJMo/
-jPwLT5o9CADQmcbKZ+QQ1ZM9v99iDZol7SAMZX43JC019sx6GK0u6xouJBcLfeB4
-OXacTgmSYdTa9RM9fbfVpti01tJ84LV2SyL/VJq/enJF4XQPSynT/tFTn1PAor6o
-tEAAd8fjKdJ6LnD5wb92SPHfQfXqI84rFEO8rUNIE/1ErT6DYifDzVCbfD2KZdoF
-cOSp7TpD77sY1bs74ocBX5ejKtd+aH99D78bJSMM4pSDZsIEwnomkBHTziubPwJb
-OwnATy0LmSMAWOw5rKbsh5nfwCiUTM20xp0t5JeXd+wPVWbpWqI2EnkCEN+RJr9i
-7dp/ymDQ+Yt5wrsN3NwoyiexPOG91WQVCADdErHsnglVZZq9Z8Wx7KwecGCUurJ2
-H6lKudv5YOxPnAzqZS5HbpZd/nRTMZh2rdXCr5m2YOuewyYjvM757AkmUpM09zJX
-MQ1S67/UX2y8/74TcRF97Ncx9HeELs92innBRXoFitnNguvcO6Esx4BTe1OdU6qR
-ER3zAmVf22Le9ciXbu24DN4mleOH+OmBx7X2PqJSYW9GAMTsRB081R6EWKH7romQ
-waxFrZ4DJzZ9ltyosEJn5F32StyLrFxpcrdLUoEaclZCv2qka7sZvi0EvovDVEBU
-e10jOx9AOwf8Gj2ufhquQ6qgVYCzbP+YrodtkFrXRS3IsljIchj1M2ffB/0bfoUs
-rtER9pLvYzCjBPg8IfGLw0o754Qbhh/ReplCRTusP/fQMybvCvfxreS3oyEriu/G
-GufRomjewZ8EMHDIgUsLcYo2UHZsfF7tcazgxMGmMvazp4r8vpgrvW/8fIN/6Adu
-tF+WjWDTvJLFJCe6O+BFJOWrssNrrra1zGtLC1s8s+Wfpe+bGPL5zpHeebGTwH1U
-22eqgJArlEKxrfarz7W5+uHZJHSjF/K9ZvunLGD0n9GOPMpji3UO3zeM8IYoWn7E
-/EWK1XbjnssNemeeTZ+sDh+qrD7BOi+vCX1IyBxbfqnQfJZvmcPWpruy1UsO+aIC
-0GY8Jr3OL69dDQ21jueJAh8EGAEIAAkFAlC9+dkCGwwACgkQL0VeKCTRjd9HCw/+
-LQSVgLLF4ulYlPCjWIIuQwrPbJfWUVVr2dPUFVM85DCv8gBzk5c121snXh9Swovm
-laBbw6ate3BmbXLh64jVE9Za5sbTWi7PCcbO/bpRy4d6oLmitmNw6cq0vjTLxUYy
-bwuiJxWREkfxuU85EKdouN062YDevH+/YResmlJrcCE7LRlJFeRlKsrrwBU3BqYd
-GgFJjKjQC1peeQ9fj62Y7xfwE9+PXbkiWO5u/Bk8hb1VZH1SoIRU98NHVcp6BVvp
-VK0jLAXuSauSczULmpRjbyt1lhaAqivDTWEEZXiNNbRyp17c3nVdPWOcgBr42hdQ
-z25CgZgyLCsvu82wuXLKJblrIPJX3Yf+si6KqEWBsmwdOWybsjygaF5HvzgFqAAD
-U0goPWoQ71PorP2XOUNp5ZLkBQp5etvtkksjVNMIhnHn8PGMuoxO39EUGlWj2B5l
-Cu8tSosAzB1pS8NcLZzoNoI9dOHrmgJmP+GrOUkcf5GhNZbMoj4GNfGBRYX0SZlQ
-GuDrwNKYj73C4MWyNnnUFyq8nDHJ/G1NpaF2hiof9RBL4PUU/f92JkceXPBXA8gL
-Mz2ig1OButwPPLFGQhWqxXAGrsS3Ny+BhTJfnfIbbkaLLphBpDZm1D9XKbAUvdd1
-RZXoH+FTg9UAW87eqU610npOkT6cRaBxaMK/mDtGNdc=
-=JTFu
------END PGP PRIVATE KEY BLOCK-----
-"""
diff --git a/src/leap/soledad/tests/couchdb.ini.template b/src/leap/soledad/tests/couchdb.ini.template
deleted file mode 100644
index 7d0316f0..00000000
--- a/src/leap/soledad/tests/couchdb.ini.template
+++ /dev/null
@@ -1,222 +0,0 @@
-; etc/couchdb/default.ini.tpl. Generated from default.ini.tpl.in by configure.
-
-; Upgrading CouchDB will overwrite this file.
-
-[couchdb]
-database_dir = %(tempdir)s/lib
-view_index_dir = %(tempdir)s/lib
-max_document_size = 4294967296 ; 4 GB
-os_process_timeout = 5000 ; 5 seconds. for view and external servers.
-max_dbs_open = 100
-delayed_commits = true ; set this to false to ensure an fsync before 201 Created is returned
-uri_file = %(tempdir)s/lib/couch.uri
-file_compression = snappy
-
-[database_compaction]
-; larger buffer sizes can result in smaller files
-doc_buffer_size = 524288 ; value in bytes
-checkpoint_after = 5242880 ; checkpoint after every N bytes were written
-
-[view_compaction]
-; larger buffer sizes can result in smaller files
-keyvalue_buffer_size = 2097152 ; value in bytes
-
-[httpd]
-port = 0
-bind_address = 127.0.0.1
-authentication_handlers = {couch_httpd_oauth, oauth_authentication_handler}, {couch_httpd_auth, cookie_authentication_handler}, {couch_httpd_auth, default_authentication_handler}
-default_handler = {couch_httpd_db, handle_request}
-secure_rewrites = true
-vhost_global_handlers = _utils, _uuids, _session, _oauth, _users
-allow_jsonp = false
-; Options for the MochiWeb HTTP server.
-;server_options = [{backlog, 128}, {acceptor_pool_size, 16}]
-; For more socket options, consult Erlang's module 'inet' man page.
-;socket_options = [{recbuf, 262144}, {sndbuf, 262144}, {nodelay, true}]
-log_max_chunk_size = 1000000
-
-[log]
-file = %(tempdir)s/log/couch.log
-level = info
-include_sasl = true
-
-[couch_httpd_auth]
-authentication_db = _users
-authentication_redirect = /_utils/session.html
-require_valid_user = false
-timeout = 600 ; number of seconds before automatic logout
-auth_cache_size = 50 ; size is number of cache entries
-allow_persistent_cookies = false ; set to true to allow persistent cookies
-
-[couch_httpd_oauth]
-; If set to 'true', oauth token and consumer secrets will be looked up
-; in the authentication database (_users). These secrets are stored in
-; a top level property named "oauth" in user documents. Example:
-; {
-; "_id": "org.couchdb.user:joe",
-; "type": "user",
-; "name": "joe",
-; "password_sha": "fe95df1ca59a9b567bdca5cbaf8412abd6e06121",
-; "salt": "4e170ffeb6f34daecfd814dfb4001a73"
-; "roles": ["foo", "bar"],
-; "oauth": {
-; "consumer_keys": {
-; "consumerKey1": "key1Secret",
-; "consumerKey2": "key2Secret"
-; },
-; "tokens": {
-; "token1": "token1Secret",
-; "token2": "token2Secret"
-; }
-; }
-; }
-use_users_db = false
-
-[query_servers]
-; javascript = %(tempdir)s/server/main.js
-
-
-; Changing reduce_limit to false will disable reduce_limit.
-; If you think you're hitting reduce_limit with a "good" reduce function,
-; please let us know on the mailing list so we can fine tune the heuristic.
-[query_server_config]
-reduce_limit = true
-os_process_limit = 25
-
-[daemons]
-view_manager={couch_view, start_link, []}
-external_manager={couch_external_manager, start_link, []}
-query_servers={couch_query_servers, start_link, []}
-vhosts={couch_httpd_vhost, start_link, []}
-httpd={couch_httpd, start_link, []}
-stats_aggregator={couch_stats_aggregator, start, []}
-stats_collector={couch_stats_collector, start, []}
-uuids={couch_uuids, start, []}
-auth_cache={couch_auth_cache, start_link, []}
-replication_manager={couch_replication_manager, start_link, []}
-os_daemons={couch_os_daemons, start_link, []}
-compaction_daemon={couch_compaction_daemon, start_link, []}
-
-[httpd_global_handlers]
-/ = {couch_httpd_misc_handlers, handle_welcome_req, <<"Welcome">>}
-
-_all_dbs = {couch_httpd_misc_handlers, handle_all_dbs_req}
-_active_tasks = {couch_httpd_misc_handlers, handle_task_status_req}
-_config = {couch_httpd_misc_handlers, handle_config_req}
-_replicate = {couch_httpd_replicator, handle_req}
-_uuids = {couch_httpd_misc_handlers, handle_uuids_req}
-_restart = {couch_httpd_misc_handlers, handle_restart_req}
-_stats = {couch_httpd_stats_handlers, handle_stats_req}
-_log = {couch_httpd_misc_handlers, handle_log_req}
-_session = {couch_httpd_auth, handle_session_req}
-_oauth = {couch_httpd_oauth, handle_oauth_req}
-
-[httpd_db_handlers]
-_view_cleanup = {couch_httpd_db, handle_view_cleanup_req}
-_compact = {couch_httpd_db, handle_compact_req}
-_design = {couch_httpd_db, handle_design_req}
-_temp_view = {couch_httpd_view, handle_temp_view_req}
-_changes = {couch_httpd_db, handle_changes_req}
-
-; The external module takes an optional argument allowing you to narrow it to a
-; single script. Otherwise the script name is inferred from the first path section
-; after _external's own path.
-; _mypath = {couch_httpd_external, handle_external_req, <<"mykey">>}
-; _external = {couch_httpd_external, handle_external_req}
-
-[httpd_design_handlers]
-_view = {couch_httpd_view, handle_view_req}
-_show = {couch_httpd_show, handle_doc_show_req}
-_list = {couch_httpd_show, handle_view_list_req}
-_info = {couch_httpd_db, handle_design_info_req}
-_rewrite = {couch_httpd_rewrite, handle_rewrite_req}
-_update = {couch_httpd_show, handle_doc_update_req}
-
-; enable external as an httpd handler, then link it with commands here.
-; note, this api is still under consideration.
-; [external]
-; mykey = /path/to/mycommand
-
-; Here you can set up commands for CouchDB to manage
-; while it is alive. CouchDB will attempt to restart each
-; command if it exits.
-; [os_daemons]
-; some_daemon_name = /path/to/script -with args
-
-
-[uuids]
-; Known algorithms:
-; random - 128 bits of random awesome
-; All awesome, all the time.
-; sequential - monotonically increasing ids with random increments
-; First 26 hex characters are random. Last 6 increment in
-; random amounts until an overflow occurs. On overflow, the
-; random prefix is regenerated and the process starts over.
-; utc_random - Time since Jan 1, 1970 UTC with microseconds
-; First 14 characters are the time in hex. Last 18 are random.
-algorithm = sequential
-
-[stats]
-; rate is in milliseconds
-rate = 1000
-; sample intervals are in seconds
-samples = [0, 60, 300, 900]
-
-[attachments]
-compression_level = 8 ; from 1 (lowest, fastest) to 9 (highest, slowest), 0 to disable compression
-compressible_types = text/*, application/javascript, application/json, application/xml
-
-[replicator]
-db = _replicator
-; Maximum replication retry count can be a non-negative integer or "infinity".
-max_replication_retry_count = 10
-; More worker processes can give higher network throughput but can also
-; imply more disk and network IO.
-worker_processes = 4
-; With lower batch sizes checkpoints are done more frequently. Lower batch sizes
-; also reduce the total amount of RAM used.
-worker_batch_size = 500
-; Maximum number of HTTP connections per replication.
-http_connections = 20
-; HTTP connection timeout per replication.
-; Even for very fast/reliable networks it might need to be increased if a remote
-; database is too busy.
-connection_timeout = 30000
-; If a request fails, the replicator will retry it up to N times.
-retries_per_request = 10
-; Some socket options that might boost performance in some scenarios:
-; {nodelay, boolean()}
-; {sndbuf, integer()}
-; {recbuf, integer()}
-; {priority, integer()}
-; See the `inet` Erlang module's man page for the full list of options.
-socket_options = [{keepalive, true}, {nodelay, false}]
-; Path to a file containing the user's certificate.
-;cert_file = /full/path/to/server_cert.pem
-; Path to file containing user's private PEM encoded key.
-;key_file = /full/path/to/server_key.pem
-; String containing the user's password. Only used if the private keyfile is password protected.
-;password = somepassword
-; Set to true to validate peer certificates.
-verify_ssl_certificates = false
-; File containing a list of peer trusted certificates (in the PEM format).
-;ssl_trusted_certificates_file = /etc/ssl/certs/ca-certificates.crt
-; Maximum peer certificate depth (must be set even if certificate validation is off).
-ssl_certificate_max_depth = 3
-
-[compaction_daemon]
-; The delay, in seconds, between each check for which database and view indexes
-; need to be compacted.
-check_interval = 300
-; If a database or view index file is smaller than this value (in bytes),
-; compaction will not happen. Very small files always have a very high
-; fragmentation, so compacting them is not worthwhile.
-min_file_size = 131072
-
-[compactions]
-; List of compaction rules for the compaction daemon.
-
-
-;[admins]
-;testuser = -hashed-f50a252c12615697c5ed24ec5cd56b05d66fe91e,b05471ba260132953930cf9f97f327f5
-; pass for above user is 'testpass' \ No newline at end of file
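
The %(tempdir)s placeholders above are ordinary Python string-interpolation keys; the CouchDBWrapper in test_couch.py (the next file in this diff) renders the template with them before launching CouchDB. A minimal sketch of that substitution, with an illustrative template path and work directory:

    # Sketch only: render couchdb.ini.template into a runnable config file.
    # The template path below is illustrative.
    import os
    import tempfile

    workdir = tempfile.mkdtemp(suffix='.couch.test')
    template = 'src/leap/soledad/tests/couchdb.ini.template'
    with open(template) as handle:
        rendered = handle.read() % {'tempdir': workdir}
    with open(os.path.join(workdir, 'test.ini'), 'w') as handle:
        handle.write(rendered)
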
diff --git a/src/leap/soledad/tests/test_couch.py b/src/leap/soledad/tests/test_couch.py
deleted file mode 100644
index bbdf44b4..00000000
--- a/src/leap/soledad/tests/test_couch.py
+++ /dev/null
@@ -1,298 +0,0 @@
-"""Test ObjectStore backend bits.
-
-These tests start and stop their own throwaway CouchDB instance (see
-CouchDBWrapper below); the couchdb executable must be on the PATH.
-"""
-
-import copy
-from leap.soledad.backends import couch
-from leap.soledad.tests import u1db_tests as tests
-from leap.soledad.tests.u1db_tests import test_backends
-from leap.soledad.tests.u1db_tests import test_sync
-try:
- import simplejson as json
-except ImportError:
- import json # noqa
-
-
-#-----------------------------------------------------------------------------
-# A wrapper for running couchdb locally.
-#-----------------------------------------------------------------------------
-
-import re
-import os
-import tempfile
-import subprocess
-import time
-import unittest
-
-
-# from: https://github.com/smcq/paisley/blob/master/paisley/test/util.py
-# TODO: include license of above project.
-class CouchDBWrapper(object):
- """
- Wrapper for external CouchDB instance which is started and stopped for
- testing.
- """
-
- def start(self):
- self.tempdir = tempfile.mkdtemp(suffix='.couch.test')
-
- path = os.path.join(os.path.dirname(__file__),
- 'couchdb.ini.template')
- handle = open(path)
- conf = handle.read() % {
- 'tempdir': self.tempdir,
- }
- handle.close()
-
- confPath = os.path.join(self.tempdir, 'test.ini')
- handle = open(confPath, 'w')
- handle.write(conf)
- handle.close()
-
- # create the dirs from the template
- os.mkdir(os.path.join(self.tempdir, 'lib'))
- os.mkdir(os.path.join(self.tempdir, 'log'))
-        args = ['couchdb', '-n', '-a', confPath]
- #null = open('/dev/null', 'w')
- self.process = subprocess.Popen(
- args, env=None, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
- close_fds=True)
- # find port
- logPath = os.path.join(self.tempdir, 'log', 'couch.log')
- while not os.path.exists(logPath):
- if self.process.poll() is not None:
- raise Exception("""
-couchdb exited with code %d.
-stdout:
-%s
-stderr:
-%s""" % (
- self.process.returncode, self.process.stdout.read(),
- self.process.stderr.read()))
- time.sleep(0.01)
- while os.stat(logPath).st_size == 0:
- time.sleep(0.01)
- PORT_RE = re.compile(
- 'Apache CouchDB has started on http://127.0.0.1:(?P<port>\d+)')
-
- handle = open(logPath)
- line = handle.read()
- handle.close()
- m = PORT_RE.search(line)
- if not m:
- self.stop()
- raise Exception("Cannot find port in line %s" % line)
- self.port = int(m.group('port'))
-
- def stop(self):
- self.process.terminate()
- self.process.communicate()
- os.system("rm -rf %s" % self.tempdir)
-
-
-class CouchDBTestCase(unittest.TestCase):
- """
- TestCase base class for tests against a real CouchDB server.
- """
-
- def setUp(self):
- self.wrapper = CouchDBWrapper()
- self.wrapper.start()
- #self.db = self.wrapper.db
- super(CouchDBTestCase, self).setUp()
-
- def tearDown(self):
- self.wrapper.stop()
- super(CouchDBTestCase, self).tearDown()
-
-
-#-----------------------------------------------------------------------------
-# The following tests come from `u1db.tests.test_common_backend`.
-#-----------------------------------------------------------------------------
-
-class TestCouchBackendImpl(CouchDBTestCase):
-
- def test__allocate_doc_id(self):
- db = couch.CouchDatabase('http://localhost:' + str(self.wrapper.port),
- 'u1db_tests')
- doc_id1 = db._allocate_doc_id()
- self.assertTrue(doc_id1.startswith('D-'))
- self.assertEqual(34, len(doc_id1))
- int(doc_id1[len('D-'):], 16)
- self.assertNotEqual(doc_id1, db._allocate_doc_id())
-
-
-#-----------------------------------------------------------------------------
-# The following tests come from `u1db.tests.test_backends`.
-#-----------------------------------------------------------------------------
-
-def make_couch_database_for_test(test, replica_uid):
- port = str(test.wrapper.port)
- return couch.CouchDatabase('http://localhost:' + port, replica_uid,
- replica_uid=replica_uid or 'test')
-
-
-def copy_couch_database_for_test(test, db):
- port = str(test.wrapper.port)
- new_db = couch.CouchDatabase('http://localhost:' + port,
- db._replica_uid + '_copy',
- replica_uid=db._replica_uid or 'test')
- gen, docs = db.get_all_docs(include_deleted=True)
- for doc in docs:
- new_db._put_doc(doc)
- new_db._transaction_log = copy.deepcopy(db._transaction_log)
- new_db._conflicts = copy.deepcopy(db._conflicts)
- new_db._other_generations = copy.deepcopy(db._other_generations)
- new_db._indexes = copy.deepcopy(db._indexes)
- new_db._set_u1db_data()
- return new_db
-
-
-COUCH_SCENARIOS = [
- ('couch', {'make_database_for_test': make_couch_database_for_test,
- 'copy_database_for_test': copy_couch_database_for_test,
- 'make_document_for_test': tests.make_document_for_test, }),
-]
-
-
-class CouchTests(test_backends.AllDatabaseTests, CouchDBTestCase):
-
- scenarios = COUCH_SCENARIOS
-
- def tearDown(self):
- self.db.delete_database()
- super(CouchTests, self).tearDown()
-
-
-class CouchDatabaseTests(test_backends.LocalDatabaseTests, CouchDBTestCase):
-
- scenarios = COUCH_SCENARIOS
-
- def tearDown(self):
- self.db.delete_database()
- super(CouchDatabaseTests, self).tearDown()
-
-
-class CouchValidateGenNTransIdTests(
- test_backends.LocalDatabaseValidateGenNTransIdTests, CouchDBTestCase):
-
- scenarios = COUCH_SCENARIOS
-
- def tearDown(self):
- self.db.delete_database()
- super(CouchValidateGenNTransIdTests, self).tearDown()
-
-
-class CouchValidateSourceGenTests(
- test_backends.LocalDatabaseValidateSourceGenTests, CouchDBTestCase):
-
- scenarios = COUCH_SCENARIOS
-
- def tearDown(self):
- self.db.delete_database()
- super(CouchValidateSourceGenTests, self).tearDown()
-
-
-class CouchWithConflictsTests(
- test_backends.LocalDatabaseWithConflictsTests, CouchDBTestCase):
-
- scenarios = COUCH_SCENARIOS
-
- def tearDown(self):
- self.db.delete_database()
- super(CouchWithConflictsTests, self).tearDown()
-
-
-# Notice: the CouchDB backend is currently used for storing encrypted data in
-# the server, so indexing makes no sense. Thus, we ignore index testing for
-# now.
-
-class CouchIndexTests(test_backends.DatabaseIndexTests, CouchDBTestCase):
-
- scenarios = COUCH_SCENARIOS
-
- def tearDown(self):
- self.db.delete_database()
- super(CouchIndexTests, self).tearDown()
-
-
-#-----------------------------------------------------------------------------
-# The following tests come from `u1db.tests.test_sync`.
-#-----------------------------------------------------------------------------
-
-target_scenarios = [
- ('local', {'create_db_and_target': test_sync._make_local_db_and_target}), ]
-
-
-simple_doc = tests.simple_doc
-nested_doc = tests.nested_doc
-
-
-class CouchDatabaseSyncTargetTests(test_sync.DatabaseSyncTargetTests,
- CouchDBTestCase):
-
- scenarios = (tests.multiply_scenarios(COUCH_SCENARIOS, target_scenarios))
-
- def tearDown(self):
- self.db.delete_database()
- super(CouchDatabaseSyncTargetTests, self).tearDown()
-
- def test_sync_exchange_returns_many_new_docs(self):
- # This test was replicated to allow dictionaries to be compared after
- # JSON expansion (because one dictionary may have many different
- # serialized representations).
- doc = self.db.create_doc_from_json(simple_doc)
- doc2 = self.db.create_doc_from_json(nested_doc)
- self.assertTransactionLog([doc.doc_id, doc2.doc_id], self.db)
- new_gen, _ = self.st.sync_exchange(
- [], 'other-replica', last_known_generation=0,
- last_known_trans_id=None, return_doc_cb=self.receive_doc)
- self.assertTransactionLog([doc.doc_id, doc2.doc_id], self.db)
- self.assertEqual(2, new_gen)
- self.assertEqual(
- [(doc.doc_id, doc.rev, json.loads(simple_doc), 1),
- (doc2.doc_id, doc2.rev, json.loads(nested_doc), 2)],
- [c[:-3] + (json.loads(c[-3]), c[-2]) for c in self.other_changes])
- if self.whitebox:
- self.assertEqual(
- self.db._last_exchange_log['return'],
- {'last_gen': 2, 'docs':
- [(doc.doc_id, doc.rev), (doc2.doc_id, doc2.rev)]})
-
-
-sync_scenarios = []
-for name, scenario in COUCH_SCENARIOS:
- scenario = dict(scenario)
- scenario['do_sync'] = test_sync.sync_via_synchronizer
- sync_scenarios.append((name, scenario))
- scenario = dict(scenario)
-
-
-class CouchDatabaseSyncTests(test_sync.DatabaseSyncTests, CouchDBTestCase):
-
- scenarios = sync_scenarios
-
- def setUp(self):
- self.db = None
- self.db1 = None
- self.db2 = None
- self.db3 = None
- super(CouchDatabaseSyncTests, self).setUp()
-
- def tearDown(self):
- self.db and self.db.delete_database()
- self.db1 and self.db1.delete_database()
- self.db2 and self.db2.delete_database()
- self.db3 and self.db3.delete_database()
- db = self.create_database('test1_copy', 'source')
- db.delete_database()
- db = self.create_database('test2_copy', 'target')
- db.delete_database()
- db = self.create_database('test3', 'target')
- db.delete_database()
- super(CouchDatabaseSyncTests, self).tearDown()
-
-
-load_tests = tests.load_with_scenarios
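
For reference, the CouchDBWrapper defined above can also be driven by hand. A minimal sketch, assuming the couchdb binary is on the PATH and couchdb.ini.template sits next to this module:

    # Sketch only: start a throwaway CouchDB, report its port, then tear down.
    from leap.soledad.tests.test_couch import CouchDBWrapper

    wrapper = CouchDBWrapper()
    wrapper.start()
    try:
        print 'CouchDB test instance listening on port %d' % wrapper.port
    finally:
        wrapper.stop()
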
diff --git a/src/leap/soledad/tests/test_encrypted.py b/src/leap/soledad/tests/test_encrypted.py
deleted file mode 100644
index 9fc81bc3..00000000
--- a/src/leap/soledad/tests/test_encrypted.py
+++ /dev/null
@@ -1,15 +0,0 @@
-from leap.soledad.backends.leap_backend import LeapDocument
-from leap.soledad.tests import BaseSoledadTest
-
-
-class EncryptedSyncTestCase(BaseSoledadTest):
-
- def test_get_set_encrypted(self):
- doc1 = LeapDocument(soledad=self._soledad)
- doc1.content = {'key': 'val'}
- doc2 = LeapDocument(doc_id=doc1.doc_id,
- encrypted_json=doc1.get_encrypted_json(),
- soledad=self._soledad)
- res1 = doc1.get_json()
- res2 = doc2.get_json()
- self.assertEqual(res1, res2, 'incorrect document encryption')
diff --git a/src/leap/soledad/tests/test_leap_backend.py b/src/leap/soledad/tests/test_leap_backend.py
deleted file mode 100644
index a061533c..00000000
--- a/src/leap/soledad/tests/test_leap_backend.py
+++ /dev/null
@@ -1,207 +0,0 @@
-"""Test LEAP backend bits.
-
-For these tests to run, a leap server has to be running on (default) port
-5984.
-"""
-
-import u1db
-from leap.soledad.backends import leap_backend
-from leap.soledad.tests import u1db_tests as tests
-from leap.soledad.tests.u1db_tests.test_remote_sync_target import (
- make_http_app,
- make_oauth_http_app,
-)
-from leap.soledad.tests import BaseSoledadTest
-from leap.soledad.tests.u1db_tests import test_backends
-from leap.soledad.tests.u1db_tests import test_http_database
-from leap.soledad.tests.u1db_tests import test_http_client
-from leap.soledad.tests.u1db_tests import test_document
-from leap.soledad.tests.u1db_tests import test_remote_sync_target
-from leap.soledad.tests.u1db_tests import test_https
-
-
-#-----------------------------------------------------------------------------
-# The following tests come from `u1db.tests.test_backends`.
-#-----------------------------------------------------------------------------
-
-def make_leap_document_for_test(test, doc_id, rev, content,
- has_conflicts=False):
- return leap_backend.LeapDocument(
- doc_id, rev, content, has_conflicts=has_conflicts,
- soledad=test._soledad)
-
-
-def make_leap_encrypted_document_for_test(test, doc_id, rev, encrypted_content,
- has_conflicts=False):
- return leap_backend.LeapDocument(
- doc_id, rev, encrypted_json=encrypted_content,
- has_conflicts=has_conflicts,
- soledad=test._soledad)
-
-
-LEAP_SCENARIOS = [
- ('http', {
- 'make_database_for_test': test_backends.make_http_database_for_test,
- 'copy_database_for_test': test_backends.copy_http_database_for_test,
- 'make_document_for_test': make_leap_document_for_test,
- 'make_app_with_state': make_http_app}),
-]
-
-
-class LeapTests(test_backends.AllDatabaseTests, BaseSoledadTest):
-
- scenarios = LEAP_SCENARIOS
-
-
-#-----------------------------------------------------------------------------
-# The following tests come from `u1db.tests.test_http_client`.
-#-----------------------------------------------------------------------------
-
-class TestLeapClientBase(test_http_client.TestHTTPClientBase):
- pass
-
-
-#-----------------------------------------------------------------------------
-# The following tests come from `u1db.tests.test_document`.
-#-----------------------------------------------------------------------------
-
-class TestLeapDocument(test_document.TestDocument, BaseSoledadTest):
-
- scenarios = ([(
- 'leap', {'make_document_for_test': make_leap_document_for_test})])
-
-
-class TestLeapPyDocument(test_document.TestPyDocument, BaseSoledadTest):
-
- scenarios = ([(
- 'leap', {'make_document_for_test': make_leap_document_for_test})])
-
-
-#-----------------------------------------------------------------------------
-# The following tests come from `u1db.tests.test_remote_sync_target`.
-#-----------------------------------------------------------------------------
-
-class TestLeapSyncTargetBasics(
- test_remote_sync_target.TestHTTPSyncTargetBasics):
-
- def test_parse_url(self):
- remote_target = leap_backend.LeapSyncTarget('http://127.0.0.1:12345/')
- self.assertEqual('http', remote_target._url.scheme)
- self.assertEqual('127.0.0.1', remote_target._url.hostname)
- self.assertEqual(12345, remote_target._url.port)
- self.assertEqual('/', remote_target._url.path)
-
-
-class TestLeapParsingSyncStream(test_remote_sync_target.TestParsingSyncStream):
-
- def test_wrong_start(self):
- tgt = leap_backend.LeapSyncTarget("http://foo/foo")
-
- self.assertRaises(u1db.errors.BrokenSyncStream,
- tgt._parse_sync_stream, "{}\r\n]", None)
-
- self.assertRaises(u1db.errors.BrokenSyncStream,
- tgt._parse_sync_stream, "\r\n{}\r\n]", None)
-
- self.assertRaises(u1db.errors.BrokenSyncStream,
- tgt._parse_sync_stream, "", None)
-
- def test_wrong_end(self):
- tgt = leap_backend.LeapSyncTarget("http://foo/foo")
-
- self.assertRaises(u1db.errors.BrokenSyncStream,
- tgt._parse_sync_stream, "[\r\n{}", None)
-
- self.assertRaises(u1db.errors.BrokenSyncStream,
- tgt._parse_sync_stream, "[\r\n", None)
-
- def test_missing_comma(self):
- tgt = leap_backend.LeapSyncTarget("http://foo/foo")
-
- self.assertRaises(u1db.errors.BrokenSyncStream,
- tgt._parse_sync_stream,
- '[\r\n{}\r\n{"id": "i", "rev": "r", '
- '"content": "c", "gen": 3}\r\n]', None)
-
- def test_no_entries(self):
- tgt = leap_backend.LeapSyncTarget("http://foo/foo")
-
- self.assertRaises(u1db.errors.BrokenSyncStream,
- tgt._parse_sync_stream, "[\r\n]", None)
-
- def test_extra_comma(self):
- tgt = leap_backend.LeapSyncTarget("http://foo/foo")
-
- self.assertRaises(u1db.errors.BrokenSyncStream,
- tgt._parse_sync_stream, "[\r\n{},\r\n]", None)
-
- self.assertRaises(leap_backend.NoSoledadInstance,
- tgt._parse_sync_stream,
- '[\r\n{},\r\n{"id": "i", "rev": "r", '
- '"content": "{}", "gen": 3, "trans_id": "T-sid"}'
- ',\r\n]',
- lambda doc, gen, trans_id: None)
-
- def test_error_in_stream(self):
- tgt = leap_backend.LeapSyncTarget("http://foo/foo")
-
- self.assertRaises(u1db.errors.Unavailable,
- tgt._parse_sync_stream,
- '[\r\n{"new_generation": 0},'
- '\r\n{"error": "unavailable"}\r\n', None)
-
- self.assertRaises(u1db.errors.Unavailable,
- tgt._parse_sync_stream,
- '[\r\n{"error": "unavailable"}\r\n', None)
-
- self.assertRaises(u1db.errors.BrokenSyncStream,
- tgt._parse_sync_stream,
- '[\r\n{"error": "?"}\r\n', None)
-
-
-def leap_sync_target(test, path):
- return leap_backend.LeapSyncTarget(test.getURL(path))
-
-
-def oauth_leap_sync_target(test, path):
- st = leap_sync_target(test, '~/' + path)
- st.set_oauth_credentials(tests.consumer1.key, tests.consumer1.secret,
- tests.token1.key, tests.token1.secret)
- return st
-
-
-class TestRemoteSyncTargets(tests.TestCaseWithServer):
-
- scenarios = [
- ('http', {'make_app_with_state': make_http_app,
- 'make_document_for_test': make_leap_document_for_test,
- 'sync_target': leap_sync_target}),
- ('oauth_http', {'make_app_with_state': make_oauth_http_app,
- 'make_document_for_test': make_leap_document_for_test,
- 'sync_target': oauth_leap_sync_target}),
- ]
-
-
-#-----------------------------------------------------------------------------
-# The following tests come from `u1db.tests.test_https`.
-#-----------------------------------------------------------------------------
-
-def oauth_https_sync_target(test, host, path):
- _, port = test.server.server_address
- st = leap_backend.LeapSyncTarget('https://%s:%d/~/%s' % (host, port, path))
- st.set_oauth_credentials(tests.consumer1.key, tests.consumer1.secret,
- tests.token1.key, tests.token1.secret)
- return st
-
-
-class TestLeapSyncTargetHttpsSupport(test_https.TestHttpSyncTargetHttpsSupport,
- BaseSoledadTest):
-
- scenarios = [
- ('oauth_https', {'server_def': test_https.https_server_def,
- 'make_app_with_state': make_oauth_http_app,
- 'make_document_for_test': make_leap_document_for_test,
- 'sync_target': oauth_https_sync_target,
- }), ]
-
-load_tests = tests.load_with_scenarios
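
The oauth_leap_sync_target helper above amounts to building a LeapSyncTarget and attaching the OAuth test fixtures. A minimal sketch outside the scenario machinery; the URL is illustrative, and consumer1/token1 are the fake credentials defined in u1db_tests, not real secrets:

    # Sketch only: an OAuth-enabled LeapSyncTarget, as the scenarios above use.
    from leap.soledad.backends import leap_backend
    from leap.soledad.tests import u1db_tests as tests

    st = leap_backend.LeapSyncTarget('http://127.0.0.1:12345/~/user-db')
    st.set_oauth_credentials(tests.consumer1.key, tests.consumer1.secret,
                             tests.token1.key, tests.token1.secret)
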
diff --git a/src/leap/soledad/tests/test_sqlcipher.py b/src/leap/soledad/tests/test_sqlcipher.py
deleted file mode 100644
index ee9b38dd..00000000
--- a/src/leap/soledad/tests/test_sqlcipher.py
+++ /dev/null
@@ -1,501 +0,0 @@
-"""Test sqlcipher backend internals."""
-
-import os
-import time
-from pysqlcipher import dbapi2
-import unittest
-from StringIO import StringIO
-import threading
-
-# u1db stuff.
-from u1db import (
- errors,
- query_parser,
- sync,
-)
-from u1db.backends.sqlite_backend import SQLitePartialExpandDatabase
-
-# soledad stuff.
-from leap.soledad.backends.sqlcipher import (
- SQLCipherDatabase,
- DatabaseIsNotEncrypted,
-)
-from leap.soledad.backends.sqlcipher import open as u1db_open
-from leap.soledad.backends.leap_backend import LeapDocument
-
-# u1db tests stuff.
-from leap.soledad.tests import u1db_tests as tests
-from leap.soledad.tests.u1db_tests import test_sqlite_backend
-from leap.soledad.tests.u1db_tests import test_backends
-from leap.soledad.tests.u1db_tests import test_open
-from leap.soledad.tests.u1db_tests import test_sync
-from leap.soledad.backends.leap_backend import LeapSyncTarget
-from leap.testing.basetest import BaseLeapTest
-
-PASSWORD = '123456'
-
-
-#-----------------------------------------------------------------------------
-# The following tests come from `u1db.tests.test_common_backend`.
-#-----------------------------------------------------------------------------
-
-class TestSQLCipherBackendImpl(tests.TestCase):
-
- def test__allocate_doc_id(self):
- db = SQLCipherDatabase(':memory:', PASSWORD)
- doc_id1 = db._allocate_doc_id()
- self.assertTrue(doc_id1.startswith('D-'))
- self.assertEqual(34, len(doc_id1))
- int(doc_id1[len('D-'):], 16)
- self.assertNotEqual(doc_id1, db._allocate_doc_id())
-
-
-#-----------------------------------------------------------------------------
-# The following tests come from `u1db.tests.test_backends`.
-#-----------------------------------------------------------------------------
-
-def make_sqlcipher_database_for_test(test, replica_uid):
- db = SQLCipherDatabase(':memory:', PASSWORD)
- db._set_replica_uid(replica_uid)
- return db
-
-
-def copy_sqlcipher_database_for_test(test, db):
- # DO NOT COPY OR REUSE THIS CODE OUTSIDE TESTS: COPYING U1DB DATABASES IS
- # THE WRONG THING TO DO, THE ONLY REASON WE DO SO HERE IS TO TEST THAT WE
- # CORRECTLY DETECT IT HAPPENING SO THAT WE CAN RAISE ERRORS RATHER THAN
- # CORRUPT USER DATA. USE SYNC INSTEAD, OR WE WILL SEND NINJA TO YOUR
- # HOUSE.
- new_db = SQLCipherDatabase(':memory:', PASSWORD)
- tmpfile = StringIO()
- for line in db._db_handle.iterdump():
-        if 'sqlite_sequence' not in line:  # work around bug in iterdump
- tmpfile.write('%s\n' % line)
- tmpfile.seek(0)
- new_db._db_handle = dbapi2.connect(':memory:')
- new_db._db_handle.cursor().executescript(tmpfile.read())
- new_db._db_handle.commit()
- new_db._set_replica_uid(db._replica_uid)
- new_db._factory = db._factory
- return new_db
-
-
-def make_document_for_test(test, doc_id, rev, content, has_conflicts=False):
- return LeapDocument(doc_id, rev, content, has_conflicts=has_conflicts)
-
-
-SQLCIPHER_SCENARIOS = [
- ('sqlcipher', {'make_database_for_test': make_sqlcipher_database_for_test,
- 'copy_database_for_test': copy_sqlcipher_database_for_test,
- 'make_document_for_test': make_document_for_test, }),
-]
-
-
-class SQLCipherTests(test_backends.AllDatabaseTests):
- scenarios = SQLCIPHER_SCENARIOS
-
-
-class SQLCipherDatabaseTests(test_backends.LocalDatabaseTests):
- scenarios = SQLCIPHER_SCENARIOS
-
-
-class SQLCipherValidateGenNTransIdTests(
- test_backends.LocalDatabaseValidateGenNTransIdTests):
- scenarios = SQLCIPHER_SCENARIOS
-
-
-class SQLCipherValidateSourceGenTests(
- test_backends.LocalDatabaseValidateSourceGenTests):
- scenarios = SQLCIPHER_SCENARIOS
-
-
-class SQLCipherWithConflictsTests(
- test_backends.LocalDatabaseWithConflictsTests):
- scenarios = SQLCIPHER_SCENARIOS
-
-
-class SQLCipherIndexTests(test_backends.DatabaseIndexTests):
- scenarios = SQLCIPHER_SCENARIOS
-
-
-load_tests = tests.load_with_scenarios
-
-
-#-----------------------------------------------------------------------------
-# The following tests come from `u1db.tests.test_sqlite_backend`.
-#-----------------------------------------------------------------------------
-
-class TestSQLCipherDatabase(test_sqlite_backend.TestSQLiteDatabase):
-
- def test_atomic_initialize(self):
- tmpdir = self.createTempDir()
- dbname = os.path.join(tmpdir, 'atomic.db')
-
- t2 = None # will be a thread
-
- class SQLCipherDatabaseTesting(SQLCipherDatabase):
- _index_storage_value = "testing"
-
- def __init__(self, dbname, ntry):
- self._try = ntry
- self._is_initialized_invocations = 0
- super(SQLCipherDatabaseTesting, self).__init__(dbname, PASSWORD)
-
- def _is_initialized(self, c):
- res = super(SQLCipherDatabaseTesting, self)._is_initialized(c)
- if self._try == 1:
- self._is_initialized_invocations += 1
- if self._is_initialized_invocations == 2:
- t2.start()
- # hard to do better and have a generic test
- time.sleep(0.05)
- return res
-
- outcome2 = []
-
- def second_try():
- try:
- db2 = SQLCipherDatabaseTesting(dbname, 2)
- except Exception, e:
- outcome2.append(e)
- else:
- outcome2.append(db2)
-
- t2 = threading.Thread(target=second_try)
- db1 = SQLCipherDatabaseTesting(dbname, 1)
- t2.join()
-
- self.assertIsInstance(outcome2[0], SQLCipherDatabaseTesting)
- db2 = outcome2[0]
- self.assertTrue(db2._is_initialized(db1._get_sqlite_handle().cursor()))
-
-
-class TestAlternativeDocument(LeapDocument):
- """A (not very) alternative implementation of Document."""
-
-
-class TestSQLCipherPartialExpandDatabase(
- test_sqlite_backend.TestSQLitePartialExpandDatabase):
-
-    # The following tests had to be cloned from u1db because they all
-    # instantiate the backend directly, so we need to change that so that
-    # our backend gets instantiated instead.
-
- def setUp(self):
- super(test_sqlite_backend.TestSQLitePartialExpandDatabase,
- self).setUp()
- self.db = SQLCipherDatabase(':memory:', PASSWORD)
- self.db._set_replica_uid('test')
-
- def test_default_replica_uid(self):
- self.db = SQLCipherDatabase(':memory:', PASSWORD)
- self.assertIsNot(None, self.db._replica_uid)
- self.assertEqual(32, len(self.db._replica_uid))
- int(self.db._replica_uid, 16)
-
- def test__parse_index(self):
- self.db = SQLCipherDatabase(':memory:', PASSWORD)
- g = self.db._parse_index_definition('fieldname')
- self.assertIsInstance(g, query_parser.ExtractField)
- self.assertEqual(['fieldname'], g.field)
-
- def test__update_indexes(self):
- self.db = SQLCipherDatabase(':memory:', PASSWORD)
- g = self.db._parse_index_definition('fieldname')
- c = self.db._get_sqlite_handle().cursor()
- self.db._update_indexes('doc-id', {'fieldname': 'val'},
- [('fieldname', g)], c)
- c.execute('SELECT doc_id, field_name, value FROM document_fields')
- self.assertEqual([('doc-id', 'fieldname', 'val')],
- c.fetchall())
-
- def test__set_replica_uid(self):
- # Start from scratch, so that replica_uid isn't set.
- self.db = SQLCipherDatabase(':memory:', PASSWORD)
- self.assertIsNot(None, self.db._real_replica_uid)
- self.assertIsNot(None, self.db._replica_uid)
- self.db._set_replica_uid('foo')
- c = self.db._get_sqlite_handle().cursor()
- c.execute("SELECT value FROM u1db_config WHERE name='replica_uid'")
- self.assertEqual(('foo',), c.fetchone())
- self.assertEqual('foo', self.db._real_replica_uid)
- self.assertEqual('foo', self.db._replica_uid)
- self.db._close_sqlite_handle()
- self.assertEqual('foo', self.db._replica_uid)
-
- def test__open_database(self):
- temp_dir = self.createTempDir(prefix='u1db-test-')
- path = temp_dir + '/test.sqlite'
- SQLCipherDatabase(path, PASSWORD)
- db2 = SQLCipherDatabase._open_database(path, PASSWORD)
- self.assertIsInstance(db2, SQLCipherDatabase)
-
- def test__open_database_with_factory(self):
- temp_dir = self.createTempDir(prefix='u1db-test-')
- path = temp_dir + '/test.sqlite'
- SQLCipherDatabase(path, PASSWORD)
- db2 = SQLCipherDatabase._open_database(
- path, PASSWORD,
- document_factory=TestAlternativeDocument)
- doc = db2.create_doc({})
- self.assertTrue(isinstance(doc, LeapDocument))
-
- def test__open_database_non_existent(self):
- temp_dir = self.createTempDir(prefix='u1db-test-')
- path = temp_dir + '/non-existent.sqlite'
- self.assertRaises(errors.DatabaseDoesNotExist,
- SQLCipherDatabase._open_database,
- path, PASSWORD)
-
- def test__open_database_during_init(self):
- temp_dir = self.createTempDir(prefix='u1db-test-')
- path = temp_dir + '/initialised.db'
- db = SQLCipherDatabase.__new__(
- SQLCipherDatabase)
- db._db_handle = dbapi2.connect(path) # db is there but not yet init-ed
- c = db._db_handle.cursor()
- c.execute('PRAGMA key="%s"' % PASSWORD)
- self.addCleanup(db.close)
- observed = []
-
- class SQLiteDatabaseTesting(SQLCipherDatabase):
- WAIT_FOR_PARALLEL_INIT_HALF_INTERVAL = 0.1
-
- @classmethod
- def _which_index_storage(cls, c):
- res = super(SQLiteDatabaseTesting, cls)._which_index_storage(c)
- db._ensure_schema() # init db
- observed.append(res[0])
- return res
-
- db2 = SQLiteDatabaseTesting._open_database(path, PASSWORD)
- self.addCleanup(db2.close)
- self.assertIsInstance(db2, SQLCipherDatabase)
- self.assertEqual(
- [None,
- SQLCipherDatabase._index_storage_value],
- observed)
-
- def test__open_database_invalid(self):
- class SQLiteDatabaseTesting(SQLCipherDatabase):
- WAIT_FOR_PARALLEL_INIT_HALF_INTERVAL = 0.1
- temp_dir = self.createTempDir(prefix='u1db-test-')
- path1 = temp_dir + '/invalid1.db'
- with open(path1, 'wb') as f:
- f.write("")
- self.assertRaises(dbapi2.OperationalError,
- SQLiteDatabaseTesting._open_database, path1,
- PASSWORD)
- with open(path1, 'wb') as f:
- f.write("invalid")
- self.assertRaises(dbapi2.DatabaseError,
- SQLiteDatabaseTesting._open_database, path1,
- PASSWORD)
-
-
- def test_open_database_existing(self):
- temp_dir = self.createTempDir(prefix='u1db-test-')
- path = temp_dir + '/existing.sqlite'
- SQLCipherDatabase(path, PASSWORD)
- db2 = SQLCipherDatabase.open_database(path, PASSWORD, create=False)
- self.assertIsInstance(db2, SQLCipherDatabase)
-
- def test_open_database_with_factory(self):
- temp_dir = self.createTempDir(prefix='u1db-test-')
- path = temp_dir + '/existing.sqlite'
- SQLCipherDatabase(path, PASSWORD)
- db2 = SQLCipherDatabase.open_database(
- path, PASSWORD, create=False,
- document_factory=TestAlternativeDocument)
- doc = db2.create_doc({})
- self.assertTrue(isinstance(doc, LeapDocument))
-
- def test_open_database_create(self):
- temp_dir = self.createTempDir(prefix='u1db-test-')
- path = temp_dir + '/new.sqlite'
- SQLCipherDatabase.open_database(path, PASSWORD, create=True)
- db2 = SQLCipherDatabase.open_database(path, PASSWORD, create=False)
- self.assertIsInstance(db2, SQLCipherDatabase)
-
- def test_create_database_initializes_schema(self):
- # This test had to be cloned because our implementation of SQLCipher
- # backend is referenced with an index_storage_value that includes the
- # word "encrypted". See u1db's sqlite_backend and our
- # sqlcipher_backend for reference.
- raw_db = self.db._get_sqlite_handle()
- c = raw_db.cursor()
- c.execute("SELECT * FROM u1db_config")
- config = dict([(r[0], r[1]) for r in c.fetchall()])
- self.assertEqual({'sql_schema': '0', 'replica_uid': 'test',
- 'index_storage': 'expand referenced encrypted'},
- config)
-
- def test_store_syncable(self):
- doc = self.db.create_doc_from_json(tests.simple_doc)
- # assert that docs are syncable by default
- self.assertEqual(True, doc.syncable)
- # assert that we can store syncable = False
- doc.syncable = False
- self.db.put_doc(doc)
- self.assertEqual(False, self.db.get_doc(doc.doc_id).syncable)
- # assert that we can store syncable = True
- doc.syncable = True
- self.db.put_doc(doc)
- self.assertEqual(True, self.db.get_doc(doc.doc_id).syncable)
-
-
-#-----------------------------------------------------------------------------
-# The following tests come from `u1db.tests.test_open`.
-#-----------------------------------------------------------------------------
-
-class SQLCipherOpen(test_open.TestU1DBOpen):
-
- def test_open_no_create(self):
- self.assertRaises(errors.DatabaseDoesNotExist,
- u1db_open, self.db_path,
- password=PASSWORD,
- create=False)
- self.assertFalse(os.path.exists(self.db_path))
-
- def test_open_create(self):
- db = u1db_open(self.db_path, password=PASSWORD, create=True)
- self.addCleanup(db.close)
- self.assertTrue(os.path.exists(self.db_path))
- self.assertIsInstance(db, SQLCipherDatabase)
-
- def test_open_with_factory(self):
- db = u1db_open(self.db_path, password=PASSWORD, create=True,
- document_factory=TestAlternativeDocument)
- self.addCleanup(db.close)
- doc = db.create_doc({})
- self.assertTrue(isinstance(doc, LeapDocument))
-
- def test_open_existing(self):
- db = SQLCipherDatabase(self.db_path, PASSWORD)
- self.addCleanup(db.close)
- doc = db.create_doc_from_json(tests.simple_doc)
- # Even though create=True, we shouldn't wipe the db
- db2 = u1db_open(self.db_path, password=PASSWORD, create=True)
- self.addCleanup(db2.close)
- doc2 = db2.get_doc(doc.doc_id)
- self.assertEqual(doc, doc2)
-
- def test_open_existing_no_create(self):
- db = SQLCipherDatabase(self.db_path, PASSWORD)
- self.addCleanup(db.close)
- db2 = u1db_open(self.db_path, password=PASSWORD, create=False)
- self.addCleanup(db2.close)
- self.assertIsInstance(db2, SQLCipherDatabase)
-
-
-#-----------------------------------------------------------------------------
-# The following tests come from `u1db.tests.test_sync`.
-#-----------------------------------------------------------------------------
-
-sync_scenarios = []
-for name, scenario in SQLCIPHER_SCENARIOS:
- scenario = dict(scenario)
- scenario['do_sync'] = test_sync.sync_via_synchronizer
- sync_scenarios.append((name, scenario))
- scenario = dict(scenario)
-
-
-def sync_via_synchronizer_and_leap(test, db_source, db_target,
- trace_hook=None, trace_hook_shallow=None):
- if trace_hook:
- test.skipTest("full trace hook unsupported over http")
- path = test._http_at[db_target]
- target = LeapSyncTarget.connect(test.getURL(path))
- if trace_hook_shallow:
- target._set_trace_hook_shallow(trace_hook_shallow)
- return sync.Synchronizer(db_source, target).sync()
-
-
-sync_scenarios.append(('pyleap', {
- 'make_database_for_test': test_sync.make_database_for_http_test,
- 'copy_database_for_test': test_sync.copy_database_for_http_test,
- 'make_document_for_test': tests.make_document_for_test,
- 'make_app_with_state': tests.test_remote_sync_target.make_http_app,
- 'do_sync': sync_via_synchronizer_and_leap,
-}))
-
-
-class SQLCipherDatabaseSyncTests(test_sync.DatabaseSyncTests):
-
- scenarios = sync_scenarios
-
-
-def _make_local_db_and_leap_target(test, path='test'):
- test.startServer()
- db = test.request_state._create_database(os.path.basename(path))
- st = LeapSyncTarget.connect(test.getURL(path))
- return db, st
-
-
-target_scenarios = [
- ('leap', {
- 'create_db_and_target': _make_local_db_and_leap_target,
- 'make_app_with_state': tests.test_remote_sync_target.make_http_app}),
-]
-
-
-class SQLCipherSyncTargetTests(test_sync.DatabaseSyncTargetTests):
-
- scenarios = (tests.multiply_scenarios(SQLCIPHER_SCENARIOS,
- target_scenarios))
-
-
-#-----------------------------------------------------------------------------
-# Tests for actual encryption of the database
-#-----------------------------------------------------------------------------
-
-class SQLCipherEncryptionTest(BaseLeapTest):
-
- def delete_dbfiles(self):
- for dbfile in [self.DB_FILE]:
- if os.path.exists(dbfile):
- os.unlink(dbfile)
-
- def setUp(self):
- self.DB_FILE = self.tempdir + '/test.db'
- self.delete_dbfiles()
-
- def tearDown(self):
- self.delete_dbfiles()
-
- def test_try_to_open_encrypted_db_with_sqlite_backend(self):
- db = SQLCipherDatabase(self.DB_FILE, PASSWORD)
- doc = db.create_doc_from_json(tests.simple_doc)
- db.close()
- try:
- # trying to open an encrypted database with the regular u1db
- # backend should raise a DatabaseError exception.
- SQLitePartialExpandDatabase(self.DB_FILE,
- document_factory=LeapDocument)
- raise DatabaseIsNotEncrypted()
- except dbapi2.DatabaseError:
- # at this point we know that the regular U1DB sqlcipher backend
- # did not succeed on opening the database, so it was indeed
- # encrypted.
- db = SQLCipherDatabase(self.DB_FILE, PASSWORD)
- doc = db.get_doc(doc.doc_id)
- self.assertEqual(tests.simple_doc, doc.get_json(),
- 'decrypted content mismatch')
-
- def test_try_to_open_raw_db_with_sqlcipher_backend(self):
- db = SQLitePartialExpandDatabase(self.DB_FILE,
- document_factory=LeapDocument)
- db.create_doc_from_json(tests.simple_doc)
- db.close()
- try:
-            # trying to open a non-encrypted database with the sqlcipher
- # backend should raise a DatabaseIsNotEncrypted exception.
- SQLCipherDatabase(self.DB_FILE, PASSWORD)
-            raise dbapi2.DatabaseError(
- "SQLCipher backend should not be able to open non-encrypted "
- "dbs.")
- except DatabaseIsNotEncrypted:
- pass
-
-
-load_tests = tests.load_with_scenarios
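
The SQLCipherOpen tests above exercise the open() helper exported by leap.soledad.backends.sqlcipher. A minimal usage sketch; the path and passphrase are illustrative:

    # Sketch only: create and reopen an encrypted database via the open() helper.
    import os
    import tempfile
    from leap.soledad.backends.sqlcipher import open as u1db_open

    path = os.path.join(tempfile.mkdtemp(), 'example.db')
    db = u1db_open(path, password='123456', create=True)
    db.create_doc_from_json('{"key": "value"}')
    db.close()
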
diff --git a/src/leap/soledad/tests/u1db_tests/README b/src/leap/soledad/tests/u1db_tests/README
deleted file mode 100644
index 605f01fa..00000000
--- a/src/leap/soledad/tests/u1db_tests/README
+++ /dev/null
@@ -1,34 +0,0 @@
-General info
-------------
-
-Test files in this directory are derived from u1db-0.1.4 tests. The main
-differences are that:
-
- (1) they include the test infrastructure packed with soledad; and
- (2) they do not include c_backend_wrapper testing.
-
-Dependencies
-------------
-
-u1db tests depend on the following python packages:
-
- nose2
- unittest2
- mercurial
- hgtools
- testtools
- discover
- oauth
- testscenarios
- dirspec
- paste
- routes
- simplejson
- cython
-
-Running tests
--------------
-
-Use nose2 to run tests:
-
- nose2 leap.soledad.tests.u1db_tests
diff --git a/src/leap/soledad/tests/u1db_tests/__init__.py b/src/leap/soledad/tests/u1db_tests/__init__.py
deleted file mode 100644
index 43304b43..00000000
--- a/src/leap/soledad/tests/u1db_tests/__init__.py
+++ /dev/null
@@ -1,421 +0,0 @@
-# Copyright 2011-2012 Canonical Ltd.
-#
-# This file is part of u1db.
-#
-# u1db is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License version 3
-# as published by the Free Software Foundation.
-#
-# u1db is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with u1db. If not, see <http://www.gnu.org/licenses/>.
-
-"""Test infrastructure for U1DB"""
-
-import copy
-import shutil
-import socket
-import tempfile
-import threading
-
-try:
- import simplejson as json
-except ImportError:
- import json # noqa
-
-from wsgiref import simple_server
-
-from oauth import oauth
-from pysqlcipher import dbapi2
-from StringIO import StringIO
-
-import testscenarios
-import testtools
-
-from u1db import (
- errors,
- Document,
-)
-from u1db.backends import (
- inmemory,
- sqlite_backend,
-)
-from u1db.remote import (
- server_state,
-)
-
-
-class TestCase(testtools.TestCase):
-
- def createTempDir(self, prefix='u1db-tmp-'):
- """Create a temporary directory to do some work in.
-
- This directory will be scheduled for cleanup when the test ends.
- """
- tempdir = tempfile.mkdtemp(prefix=prefix)
- self.addCleanup(shutil.rmtree, tempdir)
- return tempdir
-
- def make_document(self, doc_id, doc_rev, content, has_conflicts=False):
- return self.make_document_for_test(
- self, doc_id, doc_rev, content, has_conflicts)
-
- def make_document_for_test(self, test, doc_id, doc_rev, content,
- has_conflicts):
- return make_document_for_test(
- test, doc_id, doc_rev, content, has_conflicts)
-
- def assertGetDoc(self, db, doc_id, doc_rev, content, has_conflicts):
- """Assert that the document in the database looks correct."""
- exp_doc = self.make_document(doc_id, doc_rev, content,
- has_conflicts=has_conflicts)
- self.assertEqual(exp_doc, db.get_doc(doc_id))
-
- def assertGetDocIncludeDeleted(self, db, doc_id, doc_rev, content,
- has_conflicts):
- """Assert that the document in the database looks correct."""
- exp_doc = self.make_document(doc_id, doc_rev, content,
- has_conflicts=has_conflicts)
- self.assertEqual(exp_doc, db.get_doc(doc_id, include_deleted=True))
-
- def assertGetDocConflicts(self, db, doc_id, conflicts):
- """Assert what conflicts are stored for a given doc_id.
-
- :param conflicts: A list of (doc_rev, content) pairs.
- The first item must match the first item returned from the
- database, however the rest can be returned in any order.
- """
- if conflicts:
- conflicts = [(rev,
- (json.loads(cont) if isinstance(cont, basestring)
- else cont)) for (rev, cont) in conflicts]
- conflicts = conflicts[:1] + sorted(conflicts[1:])
- actual = db.get_doc_conflicts(doc_id)
- if actual:
- actual = [
- (doc.rev, (json.loads(doc.get_json())
- if doc.get_json() is not None else None))
- for doc in actual]
- actual = actual[:1] + sorted(actual[1:])
- self.assertEqual(conflicts, actual)
-
-
-def multiply_scenarios(a_scenarios, b_scenarios):
- """Create the cross-product of scenarios."""
-
- all_scenarios = []
- for a_name, a_attrs in a_scenarios:
- for b_name, b_attrs in b_scenarios:
- name = '%s,%s' % (a_name, b_name)
- attrs = dict(a_attrs)
- attrs.update(b_attrs)
- all_scenarios.append((name, attrs))
- return all_scenarios
-
-
-simple_doc = '{"key": "value"}'
-nested_doc = '{"key": "value", "sub": {"doc": "underneath"}}'
-
-
-def make_memory_database_for_test(test, replica_uid):
- return inmemory.InMemoryDatabase(replica_uid)
-
-
-def copy_memory_database_for_test(test, db):
- # DO NOT COPY OR REUSE THIS CODE OUTSIDE TESTS: COPYING U1DB DATABASES IS
- # THE WRONG THING TO DO, THE ONLY REASON WE DO SO HERE IS TO TEST THAT WE
- # CORRECTLY DETECT IT HAPPENING SO THAT WE CAN RAISE ERRORS RATHER THAN
- # CORRUPT USER DATA. USE SYNC INSTEAD, OR WE WILL SEND NINJA TO YOUR
- # HOUSE.
- new_db = inmemory.InMemoryDatabase(db._replica_uid)
- new_db._transaction_log = db._transaction_log[:]
- new_db._docs = copy.deepcopy(db._docs)
- new_db._conflicts = copy.deepcopy(db._conflicts)
- new_db._indexes = copy.deepcopy(db._indexes)
- new_db._factory = db._factory
- return new_db
-
-
-def make_sqlite_partial_expanded_for_test(test, replica_uid):
- db = sqlite_backend.SQLitePartialExpandDatabase(':memory:')
- db._set_replica_uid(replica_uid)
- return db
-
-
-def copy_sqlite_partial_expanded_for_test(test, db):
- # DO NOT COPY OR REUSE THIS CODE OUTSIDE TESTS: COPYING U1DB DATABASES IS
- # THE WRONG THING TO DO, THE ONLY REASON WE DO SO HERE IS TO TEST THAT WE
- # CORRECTLY DETECT IT HAPPENING SO THAT WE CAN RAISE ERRORS RATHER THAN
- # CORRUPT USER DATA. USE SYNC INSTEAD, OR WE WILL SEND NINJA TO YOUR
- # HOUSE.
- new_db = sqlite_backend.SQLitePartialExpandDatabase(':memory:')
- tmpfile = StringIO()
- for line in db._db_handle.iterdump():
-        if 'sqlite_sequence' not in line:  # work around bug in iterdump
- tmpfile.write('%s\n' % line)
- tmpfile.seek(0)
- new_db._db_handle = dbapi2.connect(':memory:')
- new_db._db_handle.cursor().executescript(tmpfile.read())
- new_db._db_handle.commit()
- new_db._set_replica_uid(db._replica_uid)
- new_db._factory = db._factory
- return new_db
-
-
-def make_document_for_test(test, doc_id, rev, content, has_conflicts=False):
- return Document(doc_id, rev, content, has_conflicts=has_conflicts)
-
-
-LOCAL_DATABASES_SCENARIOS = [
- ('mem', {'make_database_for_test': make_memory_database_for_test,
- 'copy_database_for_test': copy_memory_database_for_test,
- 'make_document_for_test': make_document_for_test}),
- ('sql', {'make_database_for_test':
- make_sqlite_partial_expanded_for_test,
- 'copy_database_for_test':
- copy_sqlite_partial_expanded_for_test,
- 'make_document_for_test': make_document_for_test}),
-]
-
-
-class DatabaseBaseTests(TestCase):
-
-    accept_fixed_trans_id = False  # set to True so assertTransactionLog
- # is happy with all trans ids = ''
-
- scenarios = LOCAL_DATABASES_SCENARIOS
-
- def create_database(self, replica_uid):
- return self.make_database_for_test(self, replica_uid)
-
- def copy_database(self, db):
- # DO NOT COPY OR REUSE THIS CODE OUTSIDE TESTS: COPYING U1DB DATABASES
- # IS THE WRONG THING TO DO, THE ONLY REASON WE DO SO HERE IS TO TEST
- # THAT WE CORRECTLY DETECT IT HAPPENING SO THAT WE CAN RAISE ERRORS
- # RATHER THAN CORRUPT USER DATA. USE SYNC INSTEAD, OR WE WILL SEND
- # NINJA TO YOUR HOUSE.
- return self.copy_database_for_test(self, db)
-
- def setUp(self):
- super(DatabaseBaseTests, self).setUp()
- self.db = self.create_database('test')
-
- def tearDown(self):
- # TODO: Add close_database parameterization
- # self.close_database(self.db)
- super(DatabaseBaseTests, self).tearDown()
-
- def assertTransactionLog(self, doc_ids, db):
- """Assert that the given docs are in the transaction log."""
- log = db._get_transaction_log()
- just_ids = []
- seen_transactions = set()
- for doc_id, transaction_id in log:
- just_ids.append(doc_id)
- self.assertIsNot(None, transaction_id,
- "Transaction id should not be None")
- if transaction_id == '' and self.accept_fixed_trans_id:
- continue
- self.assertNotEqual('', transaction_id,
- "Transaction id should be a unique string")
- self.assertTrue(transaction_id.startswith('T-'))
- self.assertNotIn(transaction_id, seen_transactions)
- seen_transactions.add(transaction_id)
- self.assertEqual(doc_ids, just_ids)
-
- def getLastTransId(self, db):
- """Return the transaction id for the last database update."""
- return self.db._get_transaction_log()[-1][-1]
-
-
-class ServerStateForTests(server_state.ServerState):
- """Used in the test suite, so we don't have to touch disk, etc."""
-
- def __init__(self):
- super(ServerStateForTests, self).__init__()
- self._dbs = {}
-
- def open_database(self, path):
- try:
- return self._dbs[path]
- except KeyError:
- raise errors.DatabaseDoesNotExist
-
- def check_database(self, path):
- # cares only about the possible exception
- self.open_database(path)
-
- def ensure_database(self, path):
- try:
- db = self.open_database(path)
- except errors.DatabaseDoesNotExist:
- db = self._create_database(path)
- return db, db._replica_uid
-
- def _copy_database(self, db):
- # DO NOT COPY OR REUSE THIS CODE OUTSIDE TESTS: COPYING U1DB DATABASES
- # IS THE WRONG THING TO DO, THE ONLY REASON WE DO SO HERE IS TO TEST
- # THAT WE CORRECTLY DETECT IT HAPPENING SO THAT WE CAN RAISE ERRORS
- # RATHER THAN CORRUPT USER DATA. USE SYNC INSTEAD, OR WE WILL SEND
- # NINJA TO YOUR HOUSE.
- new_db = copy_memory_database_for_test(None, db)
- path = db._replica_uid
- while path in self._dbs:
- path += 'copy'
- self._dbs[path] = new_db
- return new_db
-
- def _create_database(self, path):
- db = inmemory.InMemoryDatabase(path)
- self._dbs[path] = db
- return db
-
- def delete_database(self, path):
- del self._dbs[path]
-
-
-class ResponderForTests(object):
- """Responder for tests."""
- _started = False
- sent_response = False
- status = None
-
- def start_response(self, status='success', **kwargs):
- self._started = True
- self.status = status
- self.kwargs = kwargs
-
- def send_response(self, status='success', **kwargs):
- self.start_response(status, **kwargs)
- self.finish_response()
-
- def finish_response(self):
- self.sent_response = True
-
-
-class TestCaseWithServer(TestCase):
-
- @staticmethod
- def server_def():
- # hook point
- # should return (ServerClass, "shutdown method name", "url_scheme")
- class _RequestHandler(simple_server.WSGIRequestHandler):
- def log_request(*args):
- pass # suppress
-
- def make_server(host_port, application):
- assert application, "forgot to override make_app(_with_state)?"
- srv = simple_server.WSGIServer(host_port, _RequestHandler)
- # patch the value in if it's None
- if getattr(application, 'base_url', 1) is None:
- application.base_url = "http://%s:%s" % srv.server_address
- srv.set_app(application)
- return srv
-
- return make_server, "shutdown", "http"
-
- @staticmethod
- def make_app_with_state(state):
- # hook point
- return None
-
- def make_app(self):
- # potential hook point
- self.request_state = ServerStateForTests()
- return self.make_app_with_state(self.request_state)
-
- def setUp(self):
- super(TestCaseWithServer, self).setUp()
- self.server = self.server_thread = None
-
- @property
- def url_scheme(self):
- return self.server_def()[-1]
-
- def startServer(self):
- server_def = self.server_def()
- server_class, shutdown_meth, _ = server_def
- application = self.make_app()
- self.server = server_class(('127.0.0.1', 0), application)
- self.server_thread = threading.Thread(target=self.server.serve_forever,
- kwargs=dict(poll_interval=0.01))
- self.server_thread.start()
- self.addCleanup(self.server_thread.join)
- self.addCleanup(getattr(self.server, shutdown_meth))
-
- def getURL(self, path=None):
- host, port = self.server.server_address
- if path is None:
- path = ''
- return '%s://%s:%s/%s' % (self.url_scheme, host, port, path)
-
-
-def socket_pair():
- """Return a pair of TCP sockets connected to each other.
-
- Unlike socket.socketpair, this should work on Windows.
- """
- sock_pair = getattr(socket, 'socket_pair', None)
- if sock_pair:
- return sock_pair(socket.AF_INET, socket.SOCK_STREAM)
- listen_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- listen_sock.bind(('127.0.0.1', 0))
- listen_sock.listen(1)
- client_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- client_sock.connect(listen_sock.getsockname())
- server_sock, addr = listen_sock.accept()
- listen_sock.close()
- return server_sock, client_sock
-
-
-# OAuth related testing
-
-consumer1 = oauth.OAuthConsumer('K1', 'S1')
-token1 = oauth.OAuthToken('kkkk1', 'XYZ')
-consumer2 = oauth.OAuthConsumer('K2', 'S2')
-token2 = oauth.OAuthToken('kkkk2', 'ZYX')
-token3 = oauth.OAuthToken('kkkk3', 'ZYX')
-
-
-class TestingOAuthDataStore(oauth.OAuthDataStore):
- """In memory predefined OAuthDataStore for testing."""
-
- consumers = {
- consumer1.key: consumer1,
- consumer2.key: consumer2,
- }
-
- tokens = {
- token1.key: token1,
- token2.key: token2
- }
-
- def lookup_consumer(self, key):
- return self.consumers.get(key)
-
- def lookup_token(self, token_type, token_token):
- return self.tokens.get(token_token)
-
- def lookup_nonce(self, oauth_consumer, oauth_token, nonce):
- return None
-
-testingOAuthStore = TestingOAuthDataStore()
-
-sign_meth_HMAC_SHA1 = oauth.OAuthSignatureMethod_HMAC_SHA1()
-sign_meth_PLAINTEXT = oauth.OAuthSignatureMethod_PLAINTEXT()
-
-
-def load_with_scenarios(loader, standard_tests, pattern):
- """Load the tests in a given module.
-
- This just applies testscenarios.generate_scenarios to all the tests that
- are present. We do it at load time rather than at run time, because it
- plays nicer with various tools.
- """
- suite = loader.suiteClass()
- suite.addTests(testscenarios.generate_scenarios(standard_tests))
- return suite
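
Putting this infrastructure together, a typical test module subclasses DatabaseBaseTests, points scenarios at one of the scenario lists, and exposes load_with_scenarios as load_tests. A minimal sketch; the class and test names are illustrative:

    # Sketch only: wiring the scenario machinery defined above into a module.
    from leap.soledad.tests import u1db_tests as tests

    class ExampleDatabaseTests(tests.DatabaseBaseTests):
        # each (name, attrs) pair generates a copy of every test method
        scenarios = tests.LOCAL_DATABASES_SCENARIOS

        def test_create_and_get(self):
            doc = self.db.create_doc_from_json(tests.simple_doc)
            self.assertGetDoc(self.db, doc.doc_id, doc.rev,
                              tests.simple_doc, False)

    load_tests = tests.load_with_scenarios
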
diff --git a/src/leap/soledad/tests/u1db_tests/test_backends.py b/src/leap/soledad/tests/u1db_tests/test_backends.py
deleted file mode 100644
index a53b01ba..00000000
--- a/src/leap/soledad/tests/u1db_tests/test_backends.py
+++ /dev/null
@@ -1,1907 +0,0 @@
-# Copyright 2011 Canonical Ltd.
-#
-# This file is part of u1db.
-#
-# u1db is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License version 3
-# as published by the Free Software Foundation.
-#
-# u1db is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with u1db. If not, see <http://www.gnu.org/licenses/>.
-
-"""The backend class for U1DB. This deals with hiding storage details."""
-
-try:
- import simplejson as json
-except ImportError:
- import json # noqa
-from u1db import (
- DocumentBase,
- errors,
- vectorclock,
-)
-
-from leap.soledad.tests import u1db_tests as tests
-
-simple_doc = tests.simple_doc
-nested_doc = tests.nested_doc
-
-from leap.soledad.tests.u1db_tests.test_remote_sync_target import (
- make_http_app,
- make_oauth_http_app,
-)
-
-from u1db.remote import (
- http_database,
-)
-
-
-def make_http_database_for_test(test, replica_uid, path='test'):
- test.startServer()
- test.request_state._create_database(replica_uid)
- return http_database.HTTPDatabase(test.getURL(path))
-
-
-def copy_http_database_for_test(test, db):
- # DO NOT COPY OR REUSE THIS CODE OUTSIDE TESTS: COPYING U1DB DATABASES IS
- # THE WRONG THING TO DO, THE ONLY REASON WE DO SO HERE IS TO TEST THAT WE
- # CORRECTLY DETECT IT HAPPENING SO THAT WE CAN RAISE ERRORS RATHER THAN
- # CORRUPT USER DATA. USE SYNC INSTEAD, OR WE WILL SEND NINJA TO YOUR
- # HOUSE.
- return test.request_state._copy_database(db)
-
-
-def make_oauth_http_database_for_test(test, replica_uid):
- http_db = make_http_database_for_test(test, replica_uid, '~/test')
- http_db.set_oauth_credentials(tests.consumer1.key, tests.consumer1.secret,
- tests.token1.key, tests.token1.secret)
- return http_db
-
-
-def copy_oauth_http_database_for_test(test, db):
- # DO NOT COPY OR REUSE THIS CODE OUTSIDE TESTS: COPYING U1DB DATABASES IS
- # THE WRONG THING TO DO, THE ONLY REASON WE DO SO HERE IS TO TEST THAT WE
- # CORRECTLY DETECT IT HAPPENING SO THAT WE CAN RAISE ERRORS RATHER THAN
- # CORRUPT USER DATA. USE SYNC INSTEAD, OR WE WILL SEND NINJA TO YOUR
- # HOUSE.
- http_db = test.request_state._copy_database(db)
- http_db.set_oauth_credentials(tests.consumer1.key, tests.consumer1.secret,
- tests.token1.key, tests.token1.secret)
- return http_db
-
-
-class TestAlternativeDocument(DocumentBase):
- """A (not very) alternative implementation of Document."""
-
-
-class AllDatabaseTests(tests.DatabaseBaseTests, tests.TestCaseWithServer):
-
- scenarios = tests.LOCAL_DATABASES_SCENARIOS + [
- ('http', {'make_database_for_test': make_http_database_for_test,
- 'copy_database_for_test': copy_http_database_for_test,
- 'make_document_for_test': tests.make_document_for_test,
- 'make_app_with_state': make_http_app}),
- ('oauth_http', {'make_database_for_test':
- make_oauth_http_database_for_test,
- 'copy_database_for_test':
- copy_oauth_http_database_for_test,
- 'make_document_for_test': tests.make_document_for_test,
- 'make_app_with_state': make_oauth_http_app})
- ]
-
- def test_close(self):
- self.db.close()
-
- def test_create_doc_allocating_doc_id(self):
- doc = self.db.create_doc_from_json(simple_doc)
- self.assertNotEqual(None, doc.doc_id)
- self.assertNotEqual(None, doc.rev)
- self.assertGetDoc(self.db, doc.doc_id, doc.rev, simple_doc, False)
-
- def test_create_doc_different_ids_same_db(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- doc2 = self.db.create_doc_from_json(nested_doc)
- self.assertNotEqual(doc1.doc_id, doc2.doc_id)
-
- def test_create_doc_with_id(self):
- doc = self.db.create_doc_from_json(simple_doc, doc_id='my-id')
- self.assertEqual('my-id', doc.doc_id)
- self.assertNotEqual(None, doc.rev)
- self.assertGetDoc(self.db, doc.doc_id, doc.rev, simple_doc, False)
-
- def test_create_doc_existing_id(self):
- doc = self.db.create_doc_from_json(simple_doc)
- new_content = '{"something": "else"}'
- self.assertRaises(
- errors.RevisionConflict, self.db.create_doc_from_json,
- new_content, doc.doc_id)
- self.assertGetDoc(self.db, doc.doc_id, doc.rev, simple_doc, False)
-
- def test_put_doc_creating_initial(self):
- doc = self.make_document('my_doc_id', None, simple_doc)
- new_rev = self.db.put_doc(doc)
- self.assertIsNot(None, new_rev)
- self.assertGetDoc(self.db, 'my_doc_id', new_rev, simple_doc, False)
-
- def test_put_doc_space_in_id(self):
- doc = self.make_document('my doc id', None, simple_doc)
- self.assertRaises(errors.InvalidDocId, self.db.put_doc, doc)
-
- def test_put_doc_update(self):
- doc = self.db.create_doc_from_json(simple_doc, doc_id='my_doc_id')
- orig_rev = doc.rev
- doc.set_json('{"updated": "stuff"}')
- new_rev = self.db.put_doc(doc)
- self.assertNotEqual(new_rev, orig_rev)
- self.assertGetDoc(self.db, 'my_doc_id', new_rev,
- '{"updated": "stuff"}', False)
- self.assertEqual(doc.rev, new_rev)
-
- def test_put_non_ascii_key(self):
- content = json.dumps({u'key\xe5': u'val'})
- doc = self.db.create_doc_from_json(content, doc_id='my_doc')
- self.assertGetDoc(self.db, 'my_doc', doc.rev, content, False)
-
- def test_put_non_ascii_value(self):
- content = json.dumps({'key': u'\xe5'})
- doc = self.db.create_doc_from_json(content, doc_id='my_doc')
- self.assertGetDoc(self.db, 'my_doc', doc.rev, content, False)
-
- def test_put_doc_refuses_no_id(self):
- doc = self.make_document(None, None, simple_doc)
- self.assertRaises(errors.InvalidDocId, self.db.put_doc, doc)
- doc = self.make_document("", None, simple_doc)
- self.assertRaises(errors.InvalidDocId, self.db.put_doc, doc)
-
- def test_put_doc_refuses_slashes(self):
- doc = self.make_document('a/b', None, simple_doc)
- self.assertRaises(errors.InvalidDocId, self.db.put_doc, doc)
- doc = self.make_document(r'\b', None, simple_doc)
- self.assertRaises(errors.InvalidDocId, self.db.put_doc, doc)
-
- def test_put_doc_url_quoting_is_fine(self):
- doc_id = "%2F%2Ffoo%2Fbar"
- doc = self.make_document(doc_id, None, simple_doc)
- new_rev = self.db.put_doc(doc)
- self.assertGetDoc(self.db, doc_id, new_rev, simple_doc, False)
-
- def test_put_doc_refuses_non_existing_old_rev(self):
- doc = self.make_document('doc-id', 'test:4', simple_doc)
- self.assertRaises(errors.RevisionConflict, self.db.put_doc, doc)
-
- def test_put_doc_refuses_non_ascii_doc_id(self):
- doc = self.make_document('d\xc3\xa5c-id', None, simple_doc)
- self.assertRaises(errors.InvalidDocId, self.db.put_doc, doc)
-
- def test_put_fails_with_bad_old_rev(self):
- doc = self.db.create_doc_from_json(simple_doc, doc_id='my_doc_id')
- old_rev = doc.rev
- bad_doc = self.make_document(doc.doc_id, 'other:1',
- '{"something": "else"}')
- self.assertRaises(errors.RevisionConflict, self.db.put_doc, bad_doc)
- self.assertGetDoc(self.db, 'my_doc_id', old_rev, simple_doc, False)
-
- def test_create_succeeds_after_delete(self):
- doc = self.db.create_doc_from_json(simple_doc, doc_id='my_doc_id')
- self.db.delete_doc(doc)
- deleted_doc = self.db.get_doc('my_doc_id', include_deleted=True)
- deleted_vc = vectorclock.VectorClockRev(deleted_doc.rev)
- new_doc = self.db.create_doc_from_json(simple_doc, doc_id='my_doc_id')
- self.assertGetDoc(self.db, 'my_doc_id', new_doc.rev, simple_doc, False)
- new_vc = vectorclock.VectorClockRev(new_doc.rev)
- self.assertTrue(
- new_vc.is_newer(deleted_vc),
- "%s does not supersede %s" % (new_doc.rev, deleted_doc.rev))
-
- def test_put_succeeds_after_delete(self):
- doc = self.db.create_doc_from_json(simple_doc, doc_id='my_doc_id')
- self.db.delete_doc(doc)
- deleted_doc = self.db.get_doc('my_doc_id', include_deleted=True)
- deleted_vc = vectorclock.VectorClockRev(deleted_doc.rev)
- doc2 = self.make_document('my_doc_id', None, simple_doc)
- self.db.put_doc(doc2)
- self.assertGetDoc(self.db, 'my_doc_id', doc2.rev, simple_doc, False)
- new_vc = vectorclock.VectorClockRev(doc2.rev)
- self.assertTrue(
- new_vc.is_newer(deleted_vc),
- "%s does not supersede %s" % (doc2.rev, deleted_doc.rev))
-
- def test_get_doc_after_put(self):
- doc = self.db.create_doc_from_json(simple_doc, doc_id='my_doc_id')
- self.assertGetDoc(self.db, 'my_doc_id', doc.rev, simple_doc, False)
-
- def test_get_doc_nonexisting(self):
- self.assertIs(None, self.db.get_doc('non-existing'))
-
- def test_get_doc_deleted(self):
- doc = self.db.create_doc_from_json(simple_doc, doc_id='my_doc_id')
- self.db.delete_doc(doc)
- self.assertIs(None, self.db.get_doc('my_doc_id'))
-
- def test_get_doc_include_deleted(self):
- doc = self.db.create_doc_from_json(simple_doc, doc_id='my_doc_id')
- self.db.delete_doc(doc)
- self.assertGetDocIncludeDeleted(
- self.db, doc.doc_id, doc.rev, None, False)
-
- def test_get_docs(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- doc2 = self.db.create_doc_from_json(nested_doc)
- self.assertEqual([doc1, doc2],
- list(self.db.get_docs([doc1.doc_id, doc2.doc_id])))
-
- def test_get_docs_deleted(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- doc2 = self.db.create_doc_from_json(nested_doc)
- self.db.delete_doc(doc1)
- self.assertEqual([doc2],
- list(self.db.get_docs([doc1.doc_id, doc2.doc_id])))
-
- def test_get_docs_include_deleted(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- doc2 = self.db.create_doc_from_json(nested_doc)
- self.db.delete_doc(doc1)
- self.assertEqual(
- [doc1, doc2],
- list(self.db.get_docs([doc1.doc_id, doc2.doc_id],
- include_deleted=True)))
-
- def test_get_docs_request_ordered(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- doc2 = self.db.create_doc_from_json(nested_doc)
- self.assertEqual([doc1, doc2],
- list(self.db.get_docs([doc1.doc_id, doc2.doc_id])))
- self.assertEqual([doc2, doc1],
- list(self.db.get_docs([doc2.doc_id, doc1.doc_id])))
-
- def test_get_docs_empty_list(self):
- self.assertEqual([], list(self.db.get_docs([])))
-
- def test_handles_nested_content(self):
- doc = self.db.create_doc_from_json(nested_doc)
- self.assertGetDoc(self.db, doc.doc_id, doc.rev, nested_doc, False)
-
- def test_handles_doc_with_null(self):
- doc = self.db.create_doc_from_json('{"key": null}')
- self.assertGetDoc(self.db, doc.doc_id, doc.rev, '{"key": null}', False)
-
- def test_delete_doc(self):
- doc = self.db.create_doc_from_json(simple_doc)
- self.assertGetDoc(self.db, doc.doc_id, doc.rev, simple_doc, False)
- orig_rev = doc.rev
- self.db.delete_doc(doc)
- self.assertNotEqual(orig_rev, doc.rev)
- self.assertGetDocIncludeDeleted(
- self.db, doc.doc_id, doc.rev, None, False)
- self.assertIs(None, self.db.get_doc(doc.doc_id))
-
- def test_delete_doc_non_existent(self):
- doc = self.make_document('non-existing', 'other:1', simple_doc)
- self.assertRaises(errors.DocumentDoesNotExist, self.db.delete_doc, doc)
-
- def test_delete_doc_already_deleted(self):
- doc = self.db.create_doc_from_json(simple_doc)
- self.db.delete_doc(doc)
- self.assertRaises(errors.DocumentAlreadyDeleted,
- self.db.delete_doc, doc)
- self.assertGetDocIncludeDeleted(
- self.db, doc.doc_id, doc.rev, None, False)
-
- def test_delete_doc_bad_rev(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- self.assertGetDoc(self.db, doc1.doc_id, doc1.rev, simple_doc, False)
- doc2 = self.make_document(doc1.doc_id, 'other:1', simple_doc)
- self.assertRaises(errors.RevisionConflict, self.db.delete_doc, doc2)
- self.assertGetDoc(self.db, doc1.doc_id, doc1.rev, simple_doc, False)
-
- def test_delete_doc_sets_content_to_None(self):
- doc = self.db.create_doc_from_json(simple_doc)
- self.db.delete_doc(doc)
- self.assertIs(None, doc.get_json())
-
- def test_delete_doc_rev_supersedes(self):
- doc = self.db.create_doc_from_json(simple_doc)
- doc.set_json(nested_doc)
- self.db.put_doc(doc)
- doc.set_json('{"fishy": "content"}')
- self.db.put_doc(doc)
- old_rev = doc.rev
- self.db.delete_doc(doc)
- cur_vc = vectorclock.VectorClockRev(old_rev)
- deleted_vc = vectorclock.VectorClockRev(doc.rev)
- self.assertTrue(deleted_vc.is_newer(cur_vc),
- "%s does not supersede %s" % (doc.rev, old_rev))
-
- def test_delete_then_put(self):
- doc = self.db.create_doc_from_json(simple_doc)
- self.db.delete_doc(doc)
- self.assertGetDocIncludeDeleted(
- self.db, doc.doc_id, doc.rev, None, False)
- doc.set_json(nested_doc)
- self.db.put_doc(doc)
- self.assertGetDoc(self.db, doc.doc_id, doc.rev, nested_doc, False)
-
-
-class DocumentSizeTests(tests.DatabaseBaseTests):
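- """Tests for the configurable per-document size limit on local backends."""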
-
- scenarios = tests.LOCAL_DATABASES_SCENARIOS
-
- def test_put_doc_refuses_oversized_documents(self):
- self.db.set_document_size_limit(1)
- doc = self.make_document('doc-id', None, simple_doc)
- self.assertRaises(errors.DocumentTooBig, self.db.put_doc, doc)
-
- def test_create_doc_refuses_oversized_documents(self):
- self.db.set_document_size_limit(1)
- self.assertRaises(
- errors.DocumentTooBig, self.db.create_doc_from_json, simple_doc,
- doc_id='my_doc_id')
-
- def test_set_document_size_limit_zero(self):
- self.db.set_document_size_limit(0)
- self.assertEqual(0, self.db.document_size_limit)
-
- def test_set_document_size_limit(self):
- self.db.set_document_size_limit(1000000)
- self.assertEqual(1000000, self.db.document_size_limit)
-
-
-class LocalDatabaseTests(tests.DatabaseBaseTests):
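- """Local-backend tests: get_all_docs, _put_doc_if_newer, replica
- generation/transaction-id bookkeeping and whats_changed."""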
-
- scenarios = tests.LOCAL_DATABASES_SCENARIOS
-
- def test_create_doc_different_ids_diff_db(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- db2 = self.create_database('other-uid')
- doc2 = db2.create_doc_from_json(simple_doc)
- self.assertNotEqual(doc1.doc_id, doc2.doc_id)
-
- def test_put_doc_refuses_slashes_picky(self):
- doc = self.make_document('/a', None, simple_doc)
- self.assertRaises(errors.InvalidDocId, self.db.put_doc, doc)
-
- def test_get_all_docs_empty(self):
- self.assertEqual([], list(self.db.get_all_docs()[1]))
-
- def test_get_all_docs(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- doc2 = self.db.create_doc_from_json(nested_doc)
- self.assertEqual(
- sorted([doc1, doc2]), sorted(list(self.db.get_all_docs()[1])))
-
- def test_get_all_docs_exclude_deleted(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- doc2 = self.db.create_doc_from_json(nested_doc)
- self.db.delete_doc(doc2)
- self.assertEqual([doc1], list(self.db.get_all_docs()[1]))
-
- def test_get_all_docs_include_deleted(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- doc2 = self.db.create_doc_from_json(nested_doc)
- self.db.delete_doc(doc2)
- self.assertEqual(
- sorted([doc1, doc2]),
- sorted(list(self.db.get_all_docs(include_deleted=True)[1])))
-
- def test_get_all_docs_generation(self):
- self.db.create_doc_from_json(simple_doc)
- self.db.create_doc_from_json(nested_doc)
- self.assertEqual(2, self.db.get_all_docs()[0])
-
- def test_simple_put_doc_if_newer(self):
- doc = self.make_document('my-doc-id', 'test:1', simple_doc)
- state_at_gen = self.db._put_doc_if_newer(
- doc, save_conflict=False, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertEqual(('inserted', 1), state_at_gen)
- self.assertGetDoc(self.db, 'my-doc-id', 'test:1', simple_doc, False)
-
- def test_simple_put_doc_if_newer_deleted(self):
- self.db.create_doc_from_json('{}', doc_id='my-doc-id')
- doc = self.make_document('my-doc-id', 'test:2', None)
- state_at_gen = self.db._put_doc_if_newer(
- doc, save_conflict=False, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertEqual(('inserted', 2), state_at_gen)
- self.assertGetDocIncludeDeleted(
- self.db, 'my-doc-id', 'test:2', None, False)
-
- def test_put_doc_if_newer_already_superseded(self):
- orig_doc = '{"new": "doc"}'
- doc1 = self.db.create_doc_from_json(orig_doc)
- doc1_rev1 = doc1.rev
- doc1.set_json(simple_doc)
- self.db.put_doc(doc1)
- doc1_rev2 = doc1.rev
- # Nothing is inserted, because the document is already superseded
- doc = self.make_document(doc1.doc_id, doc1_rev1, orig_doc)
- state, _ = self.db._put_doc_if_newer(
- doc, save_conflict=False, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertEqual('superseded', state)
- self.assertGetDoc(self.db, doc1.doc_id, doc1_rev2, simple_doc, False)
-
- def test_put_doc_if_newer_autoresolve(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- rev = doc1.rev
- doc = self.make_document(doc1.doc_id, "whatever:1", doc1.get_json())
- state, _ = self.db._put_doc_if_newer(
- doc, save_conflict=False, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertEqual('superseded', state)
- doc2 = self.db.get_doc(doc1.doc_id)
- v2 = vectorclock.VectorClockRev(doc2.rev)
- self.assertTrue(v2.is_newer(vectorclock.VectorClockRev("whatever:1")))
- self.assertTrue(v2.is_newer(vectorclock.VectorClockRev(rev)))
- # strictly newer locally
- self.assertTrue(rev not in doc2.rev)
-
- def test_put_doc_if_newer_already_converged(self):
- orig_doc = '{"new": "doc"}'
- doc1 = self.db.create_doc_from_json(orig_doc)
- state_at_gen = self.db._put_doc_if_newer(
- doc1, save_conflict=False, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertEqual(('converged', 1), state_at_gen)
-
- def test_put_doc_if_newer_conflicted(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- # Nothing is inserted, the document id is returned as would-conflict
- alt_doc = self.make_document(doc1.doc_id, 'alternate:1', nested_doc)
- state, _ = self.db._put_doc_if_newer(
- alt_doc, save_conflict=False, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertEqual('conflicted', state)
- # The database wasn't altered
- self.assertGetDoc(self.db, doc1.doc_id, doc1.rev, simple_doc, False)
-
- def test_put_doc_if_newer_newer_generation(self):
- self.db._set_replica_gen_and_trans_id('other', 1, 'T-sid')
- doc = self.make_document('doc_id', 'other:2', simple_doc)
- state, _ = self.db._put_doc_if_newer(
- doc, save_conflict=False, replica_uid='other', replica_gen=2,
- replica_trans_id='T-irrelevant')
- self.assertEqual('inserted', state)
-
- def test_put_doc_if_newer_same_generation_same_txid(self):
- self.db._set_replica_gen_and_trans_id('other', 1, 'T-sid')
- doc = self.db.create_doc_from_json(simple_doc)
- self.make_document(doc.doc_id, 'other:1', simple_doc)
- state, _ = self.db._put_doc_if_newer(
- doc, save_conflict=False, replica_uid='other', replica_gen=1,
- replica_trans_id='T-sid')
- self.assertEqual('converged', state)
-
- def test_put_doc_if_newer_wrong_transaction_id(self):
- self.db._set_replica_gen_and_trans_id('other', 1, 'T-sid')
- doc = self.make_document('doc_id', 'other:1', simple_doc)
- self.assertRaises(
- errors.InvalidTransactionId,
- self.db._put_doc_if_newer, doc, save_conflict=False,
- replica_uid='other', replica_gen=1, replica_trans_id='T-sad')
-
- def test_put_doc_if_newer_old_generation_older_doc(self):
- orig_doc = '{"new": "doc"}'
- doc = self.db.create_doc_from_json(orig_doc)
- doc_rev1 = doc.rev
- doc.set_json(simple_doc)
- self.db.put_doc(doc)
- self.db._set_replica_gen_and_trans_id('other', 3, 'T-sid')
- older_doc = self.make_document(doc.doc_id, doc_rev1, simple_doc)
- state, _ = self.db._put_doc_if_newer(
- older_doc, save_conflict=False, replica_uid='other', replica_gen=8,
- replica_trans_id='T-irrelevant')
- self.assertEqual('superseded', state)
-
- def test_put_doc_if_newer_old_generation_newer_doc(self):
- self.db._set_replica_gen_and_trans_id('other', 5, 'T-sid')
- doc = self.make_document('doc_id', 'other:1', simple_doc)
- self.assertRaises(
- errors.InvalidGeneration,
- self.db._put_doc_if_newer, doc, save_conflict=False,
- replica_uid='other', replica_gen=1, replica_trans_id='T-sad')
-
- def test_put_doc_if_newer_replica_uid(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- self.db._set_replica_gen_and_trans_id('other', 1, 'T-sid')
- doc2 = self.make_document(doc1.doc_id, doc1.rev + '|other:1',
- nested_doc)
- self.assertEqual('inserted',
- self.db._put_doc_if_newer(
- doc2,
- save_conflict=False,
- replica_uid='other',
- replica_gen=2,
- replica_trans_id='T-id2')[0])
- self.assertEqual((2, 'T-id2'), self.db._get_replica_gen_and_trans_id(
- 'other'))
- # Compare to the old rev, should be superseded
- doc2 = self.make_document(doc1.doc_id, doc1.rev, nested_doc)
- self.assertEqual('superseded',
- self.db._put_doc_if_newer(
- doc2,
- save_conflict=False,
- replica_uid='other',
- replica_gen=3,
- replica_trans_id='T-id3')[0])
- self.assertEqual(
- (3, 'T-id3'), self.db._get_replica_gen_and_trans_id('other'))
- # A conflict that isn't saved still records the sync gen, because we
- # don't need to see it again
- doc2 = self.make_document(doc1.doc_id, doc1.rev + '|fourth:1',
- '{}')
- self.assertEqual('conflicted',
- self.db._put_doc_if_newer(
- doc2,
- save_conflict=False,
- replica_uid='other',
- replica_gen=4,
- replica_trans_id='T-id4')[0])
- self.assertEqual(
- (4, 'T-id4'), self.db._get_replica_gen_and_trans_id('other'))
-
- def test__get_replica_gen_and_trans_id(self):
- self.assertEqual(
- (0, ''), self.db._get_replica_gen_and_trans_id('other-db'))
- self.db._set_replica_gen_and_trans_id('other-db', 2, 'T-transaction')
- self.assertEqual(
- (2, 'T-transaction'),
- self.db._get_replica_gen_and_trans_id('other-db'))
-
- def test_put_updates_transaction_log(self):
- doc = self.db.create_doc_from_json(simple_doc)
- self.assertTransactionLog([doc.doc_id], self.db)
- doc.set_json('{"something": "else"}')
- self.db.put_doc(doc)
- self.assertTransactionLog([doc.doc_id, doc.doc_id], self.db)
- last_trans_id = self.getLastTransId(self.db)
- self.assertEqual((2, last_trans_id, [(doc.doc_id, 2, last_trans_id)]),
- self.db.whats_changed())
-
- def test_delete_updates_transaction_log(self):
- doc = self.db.create_doc_from_json(simple_doc)
- db_gen, _, _ = self.db.whats_changed()
- self.db.delete_doc(doc)
- last_trans_id = self.getLastTransId(self.db)
- self.assertEqual((2, last_trans_id, [(doc.doc_id, 2, last_trans_id)]),
- self.db.whats_changed(db_gen))
-
- def test_whats_changed_initial_database(self):
- self.assertEqual((0, '', []), self.db.whats_changed())
-
- def test_whats_changed_returns_one_id_for_multiple_changes(self):
- doc = self.db.create_doc_from_json(simple_doc)
- doc.set_json('{"new": "contents"}')
- self.db.put_doc(doc)
- last_trans_id = self.getLastTransId(self.db)
- self.assertEqual((2, last_trans_id, [(doc.doc_id, 2, last_trans_id)]),
- self.db.whats_changed())
- self.assertEqual((2, last_trans_id, []), self.db.whats_changed(2))
-
- def test_whats_changed_returns_last_edits_ascending(self):
- doc = self.db.create_doc_from_json(simple_doc)
- doc1 = self.db.create_doc_from_json(simple_doc)
- doc.set_json('{"new": "contents"}')
- self.db.delete_doc(doc1)
- delete_trans_id = self.getLastTransId(self.db)
- self.db.put_doc(doc)
- put_trans_id = self.getLastTransId(self.db)
- self.assertEqual((4, put_trans_id,
- [(doc1.doc_id, 3, delete_trans_id),
- (doc.doc_id, 4, put_trans_id)]),
- self.db.whats_changed())
-
- def test_whats_changed_doesnt_include_old_gen(self):
- self.db.create_doc_from_json(simple_doc)
- self.db.create_doc_from_json(simple_doc)
- doc2 = self.db.create_doc_from_json(simple_doc)
- last_trans_id = self.getLastTransId(self.db)
- self.assertEqual((3, last_trans_id, [(doc2.doc_id, 3, last_trans_id)]),
- self.db.whats_changed(2))
-
-
-class LocalDatabaseValidateGenNTransIdTests(tests.DatabaseBaseTests):
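- """Tests for validate_gen_and_trans_id against the local generation info."""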
-
- scenarios = tests.LOCAL_DATABASES_SCENARIOS
-
- def test_validate_gen_and_trans_id(self):
- self.db.create_doc_from_json(simple_doc)
- gen, trans_id = self.db._get_generation_info()
- self.db.validate_gen_and_trans_id(gen, trans_id)
-
- def test_validate_gen_and_trans_id_invalid_txid(self):
- self.db.create_doc_from_json(simple_doc)
- gen, _ = self.db._get_generation_info()
- self.assertRaises(
- errors.InvalidTransactionId,
- self.db.validate_gen_and_trans_id, gen, 'wrong')
-
- def test_validate_gen_and_trans_id_invalid_gen(self):
- self.db.create_doc_from_json(simple_doc)
- gen, trans_id = self.db._get_generation_info()
- self.assertRaises(
- errors.InvalidGeneration,
- self.db.validate_gen_and_trans_id, gen + 1, trans_id)
-
-
-class LocalDatabaseValidateSourceGenTests(tests.DatabaseBaseTests):
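- """Tests for the _validate_source checks on the source replica's
- generation and transaction id."""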
-
- scenarios = tests.LOCAL_DATABASES_SCENARIOS
-
- def test_validate_source_gen_and_trans_id_same(self):
- self.db._set_replica_gen_and_trans_id('other', 1, 'T-sid')
- self.db._validate_source('other', 1, 'T-sid')
-
- def test_validate_source_gen_newer(self):
- self.db._set_replica_gen_and_trans_id('other', 1, 'T-sid')
- self.db._validate_source('other', 2, 'T-whatevs')
-
- def test_validate_source_wrong_txid(self):
- self.db._set_replica_gen_and_trans_id('other', 1, 'T-sid')
- self.assertRaises(
- errors.InvalidTransactionId,
- self.db._validate_source, 'other', 1, 'T-sad')
-
-
-class LocalDatabaseWithConflictsTests(tests.DatabaseBaseTests):
- # Tests for the supporting functionality around storing and resolving
- # conflicts.
-
- scenarios = tests.LOCAL_DATABASES_SCENARIOS
-
- def test_get_docs_conflicted(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- doc2 = self.make_document(doc1.doc_id, 'alternate:1', nested_doc)
- self.db._put_doc_if_newer(
- doc2, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertEqual([doc2], list(self.db.get_docs([doc1.doc_id])))
-
- def test_get_docs_conflicts_ignored(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- doc2 = self.db.create_doc_from_json(nested_doc)
- alt_doc = self.make_document(doc1.doc_id, 'alternate:1', nested_doc)
- self.db._put_doc_if_newer(
- alt_doc, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- no_conflict_doc = self.make_document(doc1.doc_id, 'alternate:1',
- nested_doc)
- self.assertEqual([no_conflict_doc, doc2],
- list(self.db.get_docs([doc1.doc_id, doc2.doc_id],
- check_for_conflicts=False)))
-
- def test_get_doc_conflicts(self):
- doc = self.db.create_doc_from_json(simple_doc)
- alt_doc = self.make_document(doc.doc_id, 'alternate:1', nested_doc)
- self.db._put_doc_if_newer(
- alt_doc, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertEqual([alt_doc, doc],
- self.db.get_doc_conflicts(doc.doc_id))
-
- def test_get_all_docs_sees_conflicts(self):
- doc = self.db.create_doc_from_json(simple_doc)
- alt_doc = self.make_document(doc.doc_id, 'alternate:1', nested_doc)
- self.db._put_doc_if_newer(
- alt_doc, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- _, docs = self.db.get_all_docs()
- self.assertTrue(list(docs)[0].has_conflicts)
-
- def test_get_doc_conflicts_unconflicted(self):
- doc = self.db.create_doc_from_json(simple_doc)
- self.assertEqual([], self.db.get_doc_conflicts(doc.doc_id))
-
- def test_get_doc_conflicts_no_such_id(self):
- self.assertEqual([], self.db.get_doc_conflicts('doc-id'))
-
- def test_resolve_doc(self):
- doc = self.db.create_doc_from_json(simple_doc)
- alt_doc = self.make_document(doc.doc_id, 'alternate:1', nested_doc)
- self.db._put_doc_if_newer(
- alt_doc, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertGetDocConflicts(self.db, doc.doc_id,
- [('alternate:1', nested_doc),
- (doc.rev, simple_doc)])
- orig_rev = doc.rev
- self.db.resolve_doc(doc, [alt_doc.rev, doc.rev])
- self.assertNotEqual(orig_rev, doc.rev)
- self.assertFalse(doc.has_conflicts)
- self.assertGetDoc(self.db, doc.doc_id, doc.rev, simple_doc, False)
- self.assertGetDocConflicts(self.db, doc.doc_id, [])
-
- def test_resolve_doc_picks_biggest_vcr(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- doc2 = self.make_document(doc1.doc_id, 'alternate:1', nested_doc)
- self.db._put_doc_if_newer(
- doc2, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertGetDocConflicts(self.db, doc1.doc_id,
- [(doc2.rev, nested_doc),
- (doc1.rev, simple_doc)])
- orig_doc1_rev = doc1.rev
- self.db.resolve_doc(doc1, [doc2.rev, doc1.rev])
- self.assertFalse(doc1.has_conflicts)
- self.assertNotEqual(orig_doc1_rev, doc1.rev)
- self.assertGetDoc(self.db, doc1.doc_id, doc1.rev, simple_doc, False)
- self.assertGetDocConflicts(self.db, doc1.doc_id, [])
- vcr_1 = vectorclock.VectorClockRev(orig_doc1_rev)
- vcr_2 = vectorclock.VectorClockRev(doc2.rev)
- vcr_new = vectorclock.VectorClockRev(doc1.rev)
- self.assertTrue(vcr_new.is_newer(vcr_1))
- self.assertTrue(vcr_new.is_newer(vcr_2))
-
- def test_resolve_doc_partial_not_winning(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- doc2 = self.make_document(doc1.doc_id, 'alternate:1', nested_doc)
- self.db._put_doc_if_newer(
- doc2, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertGetDocConflicts(self.db, doc1.doc_id,
- [(doc2.rev, nested_doc),
- (doc1.rev, simple_doc)])
- content3 = '{"key": "valin3"}'
- doc3 = self.make_document(doc1.doc_id, 'third:1', content3)
- self.db._put_doc_if_newer(
- doc3, save_conflict=True, replica_uid='r', replica_gen=2,
- replica_trans_id='bar')
- self.assertGetDocConflicts(self.db, doc1.doc_id,
- [(doc3.rev, content3),
- (doc1.rev, simple_doc),
- (doc2.rev, nested_doc)])
- self.db.resolve_doc(doc1, [doc2.rev, doc1.rev])
- self.assertTrue(doc1.has_conflicts)
- self.assertGetDoc(self.db, doc1.doc_id, doc3.rev, content3, True)
- self.assertGetDocConflicts(self.db, doc1.doc_id,
- [(doc3.rev, content3),
- (doc1.rev, simple_doc)])
-
- def test_resolve_doc_partial_winning(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- doc2 = self.make_document(doc1.doc_id, 'alternate:1', nested_doc)
- self.db._put_doc_if_newer(
- doc2, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- content3 = '{"key": "valin3"}'
- doc3 = self.make_document(doc1.doc_id, 'third:1', content3)
- self.db._put_doc_if_newer(
- doc3, save_conflict=True, replica_uid='r', replica_gen=2,
- replica_trans_id='bar')
- self.assertGetDocConflicts(self.db, doc1.doc_id,
- [(doc3.rev, content3),
- (doc1.rev, simple_doc),
- (doc2.rev, nested_doc)])
- self.db.resolve_doc(doc1, [doc3.rev, doc1.rev])
- self.assertTrue(doc1.has_conflicts)
- self.assertGetDocConflicts(self.db, doc1.doc_id,
- [(doc1.rev, simple_doc),
- (doc2.rev, nested_doc)])
-
- def test_resolve_doc_with_delete_conflict(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- self.db.delete_doc(doc1)
- doc2 = self.make_document(doc1.doc_id, 'alternate:1', nested_doc)
- self.db._put_doc_if_newer(
- doc2, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertGetDocConflicts(self.db, doc1.doc_id,
- [(doc2.rev, nested_doc),
- (doc1.rev, None)])
- self.db.resolve_doc(doc2, [doc1.rev, doc2.rev])
- self.assertGetDocConflicts(self.db, doc1.doc_id, [])
- self.assertGetDoc(self.db, doc2.doc_id, doc2.rev, nested_doc, False)
-
- def test_resolve_doc_with_delete_to_delete(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- self.db.delete_doc(doc1)
- doc2 = self.make_document(doc1.doc_id, 'alternate:1', nested_doc)
- self.db._put_doc_if_newer(
- doc2, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertGetDocConflicts(self.db, doc1.doc_id,
- [(doc2.rev, nested_doc),
- (doc1.rev, None)])
- self.db.resolve_doc(doc1, [doc1.rev, doc2.rev])
- self.assertGetDocConflicts(self.db, doc1.doc_id, [])
- self.assertGetDocIncludeDeleted(
- self.db, doc1.doc_id, doc1.rev, None, False)
-
- def test_put_doc_if_newer_save_conflicted(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- # Document is inserted as a conflict
- doc2 = self.make_document(doc1.doc_id, 'alternate:1', nested_doc)
- state, _ = self.db._put_doc_if_newer(
- doc2, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertEqual('conflicted', state)
- # The database was updated
- self.assertGetDoc(self.db, doc1.doc_id, doc2.rev, nested_doc, True)
-
- def test_force_doc_conflict_supersedes_properly(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- doc2 = self.make_document(doc1.doc_id, 'alternate:1', '{"b": 1}')
- self.db._put_doc_if_newer(
- doc2, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- doc3 = self.make_document(doc1.doc_id, 'altalt:1', '{"c": 1}')
- self.db._put_doc_if_newer(
- doc3, save_conflict=True, replica_uid='r', replica_gen=2,
- replica_trans_id='bar')
- doc22 = self.make_document(doc1.doc_id, 'alternate:2', '{"b": 2}')
- self.db._put_doc_if_newer(
- doc22, save_conflict=True, replica_uid='r', replica_gen=3,
- replica_trans_id='zed')
- self.assertGetDocConflicts(self.db, doc1.doc_id,
- [('alternate:2', doc22.get_json()),
- ('altalt:1', doc3.get_json()),
- (doc1.rev, simple_doc)])
-
- def test_put_doc_if_newer_save_conflict_was_deleted(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- self.db.delete_doc(doc1)
- doc2 = self.make_document(doc1.doc_id, 'alternate:1', nested_doc)
- self.db._put_doc_if_newer(
- doc2, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertTrue(doc2.has_conflicts)
- self.assertGetDoc(
- self.db, doc1.doc_id, 'alternate:1', nested_doc, True)
- self.assertGetDocConflicts(self.db, doc1.doc_id,
- [('alternate:1', nested_doc),
- (doc1.rev, None)])
-
- def test_put_doc_if_newer_propagates_full_resolution(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- doc2 = self.make_document(doc1.doc_id, 'alternate:1', nested_doc)
- self.db._put_doc_if_newer(
- doc2, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- resolved_vcr = vectorclock.VectorClockRev(doc1.rev)
- vcr_2 = vectorclock.VectorClockRev(doc2.rev)
- resolved_vcr.maximize(vcr_2)
- resolved_vcr.increment('alternate')
- doc_resolved = self.make_document(doc1.doc_id, resolved_vcr.as_str(),
- '{"good": 1}')
- state, _ = self.db._put_doc_if_newer(
- doc_resolved, save_conflict=True, replica_uid='r', replica_gen=2,
- replica_trans_id='foo2')
- self.assertEqual('inserted', state)
- self.assertFalse(doc_resolved.has_conflicts)
- self.assertGetDocConflicts(self.db, doc1.doc_id, [])
- doc3 = self.db.get_doc(doc1.doc_id)
- self.assertFalse(doc3.has_conflicts)
-
- def test_put_doc_if_newer_propagates_partial_resolution(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- doc2 = self.make_document(doc1.doc_id, 'altalt:1', '{}')
- self.db._put_doc_if_newer(
- doc2, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- doc3 = self.make_document(doc1.doc_id, 'alternate:1', nested_doc)
- self.db._put_doc_if_newer(
- doc3, save_conflict=True, replica_uid='r', replica_gen=2,
- replica_trans_id='foo2')
- self.assertGetDocConflicts(self.db, doc1.doc_id,
- [('alternate:1', nested_doc),
- ('test:1', simple_doc),
- ('altalt:1', '{}')])
- resolved_vcr = vectorclock.VectorClockRev(doc1.rev)
- vcr_3 = vectorclock.VectorClockRev(doc3.rev)
- resolved_vcr.maximize(vcr_3)
- resolved_vcr.increment('alternate')
- doc_resolved = self.make_document(doc1.doc_id, resolved_vcr.as_str(),
- '{"good": 1}')
- state, _ = self.db._put_doc_if_newer(
- doc_resolved, save_conflict=True, replica_uid='r', replica_gen=3,
- replica_trans_id='foo3')
- self.assertEqual('inserted', state)
- self.assertTrue(doc_resolved.has_conflicts)
- doc4 = self.db.get_doc(doc1.doc_id)
- self.assertTrue(doc4.has_conflicts)
- self.assertGetDocConflicts(self.db, doc1.doc_id,
- [('alternate:2|test:1', '{"good": 1}'),
- ('altalt:1', '{}')])
-
- def test_put_doc_if_newer_replica_uid(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- self.db._set_replica_gen_and_trans_id('other', 1, 'T-id')
- doc2 = self.make_document(doc1.doc_id, doc1.rev + '|other:1',
- nested_doc)
- self.db._put_doc_if_newer(doc2, save_conflict=True,
- replica_uid='other', replica_gen=2,
- replica_trans_id='T-id2')
- # Conflict vs the current update
- doc2 = self.make_document(doc1.doc_id, doc1.rev + '|third:3',
- '{}')
- self.assertEqual('conflicted',
- self.db._put_doc_if_newer(
- doc2,
- save_conflict=True,
- replica_uid='other',
- replica_gen=3,
- replica_trans_id='T-id3')[0])
- self.assertEqual(
- (3, 'T-id3'), self.db._get_replica_gen_and_trans_id('other'))
-
- def test_put_doc_if_newer_autoresolve_2(self):
- # This is an ordering variant of autoresolve_3, which already works;
- # the test is added explicitly so any regression is caught easily.
- doc_a1 = self.db.create_doc_from_json(simple_doc)
- doc_a2 = self.make_document(doc_a1.doc_id, 'test:2', "{}")
- doc_a1b1 = self.make_document(doc_a1.doc_id, 'test:1|other:1',
- '{"a":"42"}')
- doc_a3 = self.make_document(doc_a1.doc_id, 'test:2|other:1', "{}")
- state, _ = self.db._put_doc_if_newer(
- doc_a2, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertEqual(state, 'inserted')
- state, _ = self.db._put_doc_if_newer(
- doc_a1b1, save_conflict=True, replica_uid='r', replica_gen=2,
- replica_trans_id='foo2')
- self.assertEqual(state, 'conflicted')
- state, _ = self.db._put_doc_if_newer(
- doc_a3, save_conflict=True, replica_uid='r', replica_gen=3,
- replica_trans_id='foo3')
- self.assertEqual(state, 'inserted')
- self.assertFalse(self.db.get_doc(doc_a1.doc_id).has_conflicts)
-
- def test_put_doc_if_newer_autoresolve_3(self):
- doc_a1 = self.db.create_doc_from_json(simple_doc)
- doc_a1b1 = self.make_document(doc_a1.doc_id, 'test:1|other:1', "{}")
- doc_a2 = self.make_document(doc_a1.doc_id, 'test:2', '{"a":"42"}')
- doc_a3 = self.make_document(doc_a1.doc_id, 'test:3', "{}")
- state, _ = self.db._put_doc_if_newer(
- doc_a1b1, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertEqual(state, 'inserted')
- state, _ = self.db._put_doc_if_newer(
- doc_a2, save_conflict=True, replica_uid='r', replica_gen=2,
- replica_trans_id='foo2')
- self.assertEqual(state, 'conflicted')
- state, _ = self.db._put_doc_if_newer(
- doc_a3, save_conflict=True, replica_uid='r', replica_gen=3,
- replica_trans_id='foo3')
- self.assertEqual(state, 'superseded')
- doc = self.db.get_doc(doc_a1.doc_id, True)
- self.assertFalse(doc.has_conflicts)
- rev = vectorclock.VectorClockRev(doc.rev)
- rev_a3 = vectorclock.VectorClockRev('test:3')
- rev_a1b1 = vectorclock.VectorClockRev('test:1|other:1')
- self.assertTrue(rev.is_newer(rev_a3))
- self.assertTrue('test:4' in doc.rev) # locally increased
- self.assertTrue(rev.is_newer(rev_a1b1))
-
- def test_put_doc_if_newer_autoresolve_4(self):
- doc_a1 = self.db.create_doc_from_json(simple_doc)
- doc_a1b1 = self.make_document(doc_a1.doc_id, 'test:1|other:1', None)
- doc_a2 = self.make_document(doc_a1.doc_id, 'test:2', '{"a":"42"}')
- doc_a3 = self.make_document(doc_a1.doc_id, 'test:3', None)
- state, _ = self.db._put_doc_if_newer(
- doc_a1b1, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertEqual(state, 'inserted')
- state, _ = self.db._put_doc_if_newer(
- doc_a2, save_conflict=True, replica_uid='r', replica_gen=2,
- replica_trans_id='foo2')
- self.assertEqual(state, 'conflicted')
- state, _ = self.db._put_doc_if_newer(
- doc_a3, save_conflict=True, replica_uid='r', replica_gen=3,
- replica_trans_id='foo3')
- self.assertEqual(state, 'superseded')
- doc = self.db.get_doc(doc_a1.doc_id, True)
- self.assertFalse(doc.has_conflicts)
- rev = vectorclock.VectorClockRev(doc.rev)
- rev_a3 = vectorclock.VectorClockRev('test:3')
- rev_a1b1 = vectorclock.VectorClockRev('test:1|other:1')
- self.assertTrue(rev.is_newer(rev_a3))
- self.assertTrue('test:4' in doc.rev) # locally increased
- self.assertTrue(rev.is_newer(rev_a1b1))
-
- def test_put_refuses_to_update_conflicted(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- content2 = '{"key": "altval"}'
- doc2 = self.make_document(doc1.doc_id, 'altrev:1', content2)
- self.db._put_doc_if_newer(
- doc2, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertGetDoc(self.db, doc1.doc_id, doc2.rev, content2, True)
- content3 = '{"key": "local"}'
- doc2.set_json(content3)
- self.assertRaises(errors.ConflictedDoc, self.db.put_doc, doc2)
-
- def test_delete_refuses_for_conflicted(self):
- doc1 = self.db.create_doc_from_json(simple_doc)
- doc2 = self.make_document(doc1.doc_id, 'altrev:1', nested_doc)
- self.db._put_doc_if_newer(
- doc2, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertGetDoc(self.db, doc2.doc_id, doc2.rev, nested_doc, True)
- self.assertRaises(errors.ConflictedDoc, self.db.delete_doc, doc2)
-
-
-class DatabaseIndexTests(tests.DatabaseBaseTests):
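- """Tests for index creation, deletion and querying: exact, wildcard,
- glob and range matches, nested fields, and lower()/split_words()."""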
-
- scenarios = tests.LOCAL_DATABASES_SCENARIOS
-
- def assertParseError(self, definition):
- self.db.create_doc_from_json(nested_doc)
- self.assertRaises(
- errors.IndexDefinitionParseError, self.db.create_index, 'idx',
- definition)
-
- def assertIndexCreatable(self, definition):
- name = "idx"
- self.db.create_doc_from_json(nested_doc)
- self.db.create_index(name, definition)
- self.assertEqual(
- [(name, [definition])], self.db.list_indexes())
-
- def test_create_index(self):
- self.db.create_index('test-idx', 'name')
- self.assertEqual([('test-idx', ['name'])],
- self.db.list_indexes())
-
- def test_create_index_on_non_ascii_field_name(self):
- doc = self.db.create_doc_from_json(json.dumps({u'\xe5': 'value'}))
- self.db.create_index('test-idx', u'\xe5')
- self.assertEqual([doc], self.db.get_from_index('test-idx', 'value'))
-
- def test_list_indexes_with_non_ascii_field_names(self):
- self.db.create_index('test-idx', u'\xe5')
- self.assertEqual(
- [('test-idx', [u'\xe5'])], self.db.list_indexes())
-
- def test_create_index_evaluates_it(self):
- doc = self.db.create_doc_from_json(simple_doc)
- self.db.create_index('test-idx', 'key')
- self.assertEqual([doc], self.db.get_from_index('test-idx', 'value'))
-
- def test_wildcard_matches_unicode_value(self):
- doc = self.db.create_doc_from_json(json.dumps({"key": u"valu\xe5"}))
- self.db.create_index('test-idx', 'key')
- self.assertEqual([doc], self.db.get_from_index('test-idx', '*'))
-
- def test_retrieve_unicode_value_from_index(self):
- doc = self.db.create_doc_from_json(json.dumps({"key": u"valu\xe5"}))
- self.db.create_index('test-idx', 'key')
- self.assertEqual(
- [doc], self.db.get_from_index('test-idx', u"valu\xe5"))
-
- def test_create_index_fails_if_name_taken(self):
- self.db.create_index('test-idx', 'key')
- self.assertRaises(errors.IndexNameTakenError,
- self.db.create_index,
- 'test-idx', 'stuff')
-
- def test_create_index_does_not_fail_if_name_taken_with_same_index(self):
- self.db.create_index('test-idx', 'key')
- self.db.create_index('test-idx', 'key')
- self.assertEqual([('test-idx', ['key'])], self.db.list_indexes())
-
- def test_create_index_does_not_duplicate_indexed_fields(self):
- self.db.create_doc_from_json(simple_doc)
- self.db.create_index('test-idx', 'key')
- self.db.delete_index('test-idx')
- self.db.create_index('test-idx', 'key')
- self.assertEqual(1, len(self.db.get_from_index('test-idx', 'value')))
-
- def test_delete_index_does_not_remove_fields_from_other_indexes(self):
- self.db.create_doc_from_json(simple_doc)
- self.db.create_index('test-idx', 'key')
- self.db.create_index('test-idx2', 'key')
- self.db.delete_index('test-idx')
- self.assertEqual(1, len(self.db.get_from_index('test-idx2', 'value')))
-
- def test_create_index_after_deleting_document(self):
- doc = self.db.create_doc_from_json(simple_doc)
- doc2 = self.db.create_doc_from_json(simple_doc)
- self.db.delete_doc(doc2)
- self.db.create_index('test-idx', 'key')
- self.assertEqual([doc], self.db.get_from_index('test-idx', 'value'))
-
- def test_delete_index(self):
- self.db.create_index('test-idx', 'key')
- self.assertEqual([('test-idx', ['key'])], self.db.list_indexes())
- self.db.delete_index('test-idx')
- self.assertEqual([], self.db.list_indexes())
-
- def test_create_adds_to_index(self):
- self.db.create_index('test-idx', 'key')
- doc = self.db.create_doc_from_json(simple_doc)
- self.assertEqual([doc], self.db.get_from_index('test-idx', 'value'))
-
- def test_get_from_index_unmatched(self):
- self.db.create_doc_from_json(simple_doc)
- self.db.create_index('test-idx', 'key')
- self.assertEqual([], self.db.get_from_index('test-idx', 'novalue'))
-
- def test_create_index_multiple_exact_matches(self):
- doc = self.db.create_doc_from_json(simple_doc)
- doc2 = self.db.create_doc_from_json(simple_doc)
- self.db.create_index('test-idx', 'key')
- self.assertEqual(
- sorted([doc, doc2]),
- sorted(self.db.get_from_index('test-idx', 'value')))
-
- def test_get_from_index(self):
- doc = self.db.create_doc_from_json(simple_doc)
- self.db.create_index('test-idx', 'key')
- self.assertEqual([doc], self.db.get_from_index('test-idx', 'value'))
-
- def test_get_from_index_multi(self):
- content = '{"key": "value", "key2": "value2"}'
- doc = self.db.create_doc_from_json(content)
- self.db.create_index('test-idx', 'key', 'key2')
- self.assertEqual(
- [doc], self.db.get_from_index('test-idx', 'value', 'value2'))
-
- def test_get_from_index_multi_list(self):
- doc = self.db.create_doc_from_json(
- '{"key": "value", "key2": ["value2-1", "value2-2", "value2-3"]}')
- self.db.create_index('test-idx', 'key', 'key2')
- self.assertEqual(
- [doc], self.db.get_from_index('test-idx', 'value', 'value2-1'))
- self.assertEqual(
- [doc], self.db.get_from_index('test-idx', 'value', 'value2-2'))
- self.assertEqual(
- [doc], self.db.get_from_index('test-idx', 'value', 'value2-3'))
- self.assertEqual(
- [('value', 'value2-1'), ('value', 'value2-2'),
- ('value', 'value2-3')],
- sorted(self.db.get_index_keys('test-idx')))
-
- def test_get_from_index_sees_conflicts(self):
- doc = self.db.create_doc_from_json(simple_doc)
- self.db.create_index('test-idx', 'key', 'key2')
- alt_doc = self.make_document(
- doc.doc_id, 'alternate:1',
- '{"key": "value", "key2": ["value2-1", "value2-2", "value2-3"]}')
- self.db._put_doc_if_newer(
- alt_doc, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- docs = self.db.get_from_index('test-idx', 'value', 'value2-1')
- self.assertTrue(docs[0].has_conflicts)
-
- def test_get_index_keys_multi_list_list(self):
- self.db.create_doc_from_json(
- '{"key": "value1-1 value1-2 value1-3", '
- '"key2": ["value2-1", "value2-2", "value2-3"]}')
- self.db.create_index('test-idx', 'split_words(key)', 'key2')
- self.assertEqual(
- [(u'value1-1', u'value2-1'), (u'value1-1', u'value2-2'),
- (u'value1-1', u'value2-3'), (u'value1-2', u'value2-1'),
- (u'value1-2', u'value2-2'), (u'value1-2', u'value2-3'),
- (u'value1-3', u'value2-1'), (u'value1-3', u'value2-2'),
- (u'value1-3', u'value2-3')],
- sorted(self.db.get_index_keys('test-idx')))
-
- def test_get_from_index_multi_ordered(self):
- doc1 = self.db.create_doc_from_json(
- '{"key": "value3", "key2": "value4"}')
- doc2 = self.db.create_doc_from_json(
- '{"key": "value2", "key2": "value3"}')
- doc3 = self.db.create_doc_from_json(
- '{"key": "value2", "key2": "value2"}')
- doc4 = self.db.create_doc_from_json(
- '{"key": "value1", "key2": "value1"}')
- self.db.create_index('test-idx', 'key', 'key2')
- self.assertEqual(
- [doc4, doc3, doc2, doc1],
- self.db.get_from_index('test-idx', 'v*', '*'))
-
- def test_get_range_from_index_start_end(self):
- doc1 = self.db.create_doc_from_json('{"key": "value3"}')
- doc2 = self.db.create_doc_from_json('{"key": "value2"}')
- self.db.create_doc_from_json('{"key": "value4"}')
- self.db.create_doc_from_json('{"key": "value1"}')
- self.db.create_index('test-idx', 'key')
- self.assertEqual(
- [doc2, doc1],
- self.db.get_range_from_index('test-idx', 'value2', 'value3'))
-
- def test_get_range_from_index_start(self):
- doc1 = self.db.create_doc_from_json('{"key": "value3"}')
- doc2 = self.db.create_doc_from_json('{"key": "value2"}')
- doc3 = self.db.create_doc_from_json('{"key": "value4"}')
- self.db.create_doc_from_json('{"key": "value1"}')
- self.db.create_index('test-idx', 'key')
- self.assertEqual(
- [doc2, doc1, doc3],
- self.db.get_range_from_index('test-idx', 'value2'))
-
- def test_get_range_from_index_sees_conflicts(self):
- doc = self.db.create_doc_from_json(simple_doc)
- self.db.create_index('test-idx', 'key')
- alt_doc = self.make_document(
- doc.doc_id, 'alternate:1', '{"key": "valuedepalue"}')
- self.db._put_doc_if_newer(
- alt_doc, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- docs = self.db.get_range_from_index('test-idx', 'a')
- self.assertTrue(docs[0].has_conflicts)
-
- def test_get_range_from_index_end(self):
- self.db.create_doc_from_json('{"key": "value3"}')
- doc2 = self.db.create_doc_from_json('{"key": "value2"}')
- self.db.create_doc_from_json('{"key": "value4"}')
- doc4 = self.db.create_doc_from_json('{"key": "value1"}')
- self.db.create_index('test-idx', 'key')
- self.assertEqual(
- [doc4, doc2],
- self.db.get_range_from_index('test-idx', None, 'value2'))
-
- def test_get_wildcard_range_from_index_start(self):
- doc1 = self.db.create_doc_from_json('{"key": "value4"}')
- doc2 = self.db.create_doc_from_json('{"key": "value23"}')
- doc3 = self.db.create_doc_from_json('{"key": "value2"}')
- doc4 = self.db.create_doc_from_json('{"key": "value22"}')
- self.db.create_doc_from_json('{"key": "value1"}')
- self.db.create_index('test-idx', 'key')
- self.assertEqual(
- [doc3, doc4, doc2, doc1],
- self.db.get_range_from_index('test-idx', 'value2*'))
-
- def test_get_wildcard_range_from_index_end(self):
- self.db.create_doc_from_json('{"key": "value4"}')
- doc2 = self.db.create_doc_from_json('{"key": "value23"}')
- doc3 = self.db.create_doc_from_json('{"key": "value2"}')
- doc4 = self.db.create_doc_from_json('{"key": "value22"}')
- doc5 = self.db.create_doc_from_json('{"key": "value1"}')
- self.db.create_index('test-idx', 'key')
- self.assertEqual(
- [doc5, doc3, doc4, doc2],
- self.db.get_range_from_index('test-idx', None, 'value2*'))
-
- def test_get_wildcard_range_from_index_start_end(self):
- self.db.create_doc_from_json('{"key": "a"}')
- self.db.create_doc_from_json('{"key": "boo3"}')
- doc3 = self.db.create_doc_from_json('{"key": "catalyst"}')
- doc4 = self.db.create_doc_from_json('{"key": "whaever"}')
- self.db.create_doc_from_json('{"key": "zerg"}')
- self.db.create_index('test-idx', 'key')
- self.assertEqual(
- [doc3, doc4],
- self.db.get_range_from_index('test-idx', 'cat*', 'zap*'))
-
- def test_get_range_from_index_multi_column_start_end(self):
- self.db.create_doc_from_json('{"key": "value3", "key2": "value4"}')
- doc2 = self.db.create_doc_from_json(
- '{"key": "value2", "key2": "value3"}')
- doc3 = self.db.create_doc_from_json(
- '{"key": "value2", "key2": "value2"}')
- self.db.create_doc_from_json('{"key": "value1", "key2": "value1"}')
- self.db.create_index('test-idx', 'key', 'key2')
- self.assertEqual(
- [doc3, doc2],
- self.db.get_range_from_index(
- 'test-idx', ('value2', 'value2'), ('value2', 'value3')))
-
- def test_get_range_from_index_multi_column_start(self):
- doc1 = self.db.create_doc_from_json(
- '{"key": "value3", "key2": "value4"}')
- doc2 = self.db.create_doc_from_json(
- '{"key": "value2", "key2": "value3"}')
- self.db.create_doc_from_json('{"key": "value2", "key2": "value2"}')
- self.db.create_doc_from_json('{"key": "value1", "key2": "value1"}')
- self.db.create_index('test-idx', 'key', 'key2')
- self.assertEqual(
- [doc2, doc1],
- self.db.get_range_from_index('test-idx', ('value2', 'value3')))
-
- def test_get_range_from_index_multi_column_end(self):
- self.db.create_doc_from_json('{"key": "value3", "key2": "value4"}')
- doc2 = self.db.create_doc_from_json(
- '{"key": "value2", "key2": "value3"}')
- doc3 = self.db.create_doc_from_json(
- '{"key": "value2", "key2": "value2"}')
- doc4 = self.db.create_doc_from_json(
- '{"key": "value1", "key2": "value1"}')
- self.db.create_index('test-idx', 'key', 'key2')
- self.assertEqual(
- [doc4, doc3, doc2],
- self.db.get_range_from_index(
- 'test-idx', None, ('value2', 'value3')))
-
- def test_get_wildcard_range_from_index_multi_column_start(self):
- doc1 = self.db.create_doc_from_json(
- '{"key": "value3", "key2": "value4"}')
- doc2 = self.db.create_doc_from_json(
- '{"key": "value2", "key2": "value23"}')
- doc3 = self.db.create_doc_from_json(
- '{"key": "value2", "key2": "value2"}')
- self.db.create_doc_from_json('{"key": "value1", "key2": "value1"}')
- self.db.create_index('test-idx', 'key', 'key2')
- self.assertEqual(
- [doc3, doc2, doc1],
- self.db.get_range_from_index('test-idx', ('value2', 'value2*')))
-
- def test_get_wildcard_range_from_index_multi_column_end(self):
- self.db.create_doc_from_json('{"key": "value3", "key2": "value4"}')
- doc2 = self.db.create_doc_from_json(
- '{"key": "value2", "key2": "value23"}')
- doc3 = self.db.create_doc_from_json(
- '{"key": "value2", "key2": "value2"}')
- doc4 = self.db.create_doc_from_json(
- '{"key": "value1", "key2": "value1"}')
- self.db.create_index('test-idx', 'key', 'key2')
- self.assertEqual(
- [doc4, doc3, doc2],
- self.db.get_range_from_index(
- 'test-idx', None, ('value2', 'value2*')))
-
- def test_get_glob_range_from_index_multi_column_start(self):
- doc1 = self.db.create_doc_from_json(
- '{"key": "value3", "key2": "value4"}')
- doc2 = self.db.create_doc_from_json(
- '{"key": "value2", "key2": "value23"}')
- self.db.create_doc_from_json('{"key": "value1", "key2": "value2"}')
- self.db.create_doc_from_json('{"key": "value1", "key2": "value1"}')
- self.db.create_index('test-idx', 'key', 'key2')
- self.assertEqual(
- [doc2, doc1],
- self.db.get_range_from_index('test-idx', ('value2', '*')))
-
- def test_get_glob_range_from_index_multi_column_end(self):
- self.db.create_doc_from_json('{"key": "value3", "key2": "value4"}')
- doc2 = self.db.create_doc_from_json(
- '{"key": "value2", "key2": "value23"}')
- doc3 = self.db.create_doc_from_json(
- '{"key": "value1", "key2": "value2"}')
- doc4 = self.db.create_doc_from_json(
- '{"key": "value1", "key2": "value1"}')
- self.db.create_index('test-idx', 'key', 'key2')
- self.assertEqual(
- [doc4, doc3, doc2],
- self.db.get_range_from_index('test-idx', None, ('value2', '*')))
-
- def test_get_range_from_index_illegal_wildcard_order(self):
- self.db.create_index('test-idx', 'k1', 'k2')
- self.assertRaises(
- errors.InvalidGlobbing,
- self.db.get_range_from_index, 'test-idx', ('*', 'v2'))
-
- def test_get_range_from_index_illegal_glob_after_wildcard(self):
- self.db.create_index('test-idx', 'k1', 'k2')
- self.assertRaises(
- errors.InvalidGlobbing,
- self.db.get_range_from_index, 'test-idx', ('*', 'v*'))
-
- def test_get_range_from_index_illegal_wildcard_order_end(self):
- self.db.create_index('test-idx', 'k1', 'k2')
- self.assertRaises(
- errors.InvalidGlobbing,
- self.db.get_range_from_index, 'test-idx', None, ('*', 'v2'))
-
- def test_get_range_from_index_illegal_glob_after_wildcard_end(self):
- self.db.create_index('test-idx', 'k1', 'k2')
- self.assertRaises(
- errors.InvalidGlobbing,
- self.db.get_range_from_index, 'test-idx', None, ('*', 'v*'))
-
- def test_get_from_index_fails_if_no_index(self):
- self.assertRaises(
- errors.IndexDoesNotExist, self.db.get_from_index, 'foo')
-
- def test_get_index_keys_fails_if_no_index(self):
- self.assertRaises(errors.IndexDoesNotExist,
- self.db.get_index_keys,
- 'foo')
-
- def test_get_index_keys_works_if_no_docs(self):
- self.db.create_index('test-idx', 'key')
- self.assertEqual([], self.db.get_index_keys('test-idx'))
-
- def test_put_updates_index(self):
- doc = self.db.create_doc_from_json(simple_doc)
- self.db.create_index('test-idx', 'key')
- new_content = '{"key": "altval"}'
- doc.set_json(new_content)
- self.db.put_doc(doc)
- self.assertEqual([], self.db.get_from_index('test-idx', 'value'))
- self.assertEqual([doc], self.db.get_from_index('test-idx', 'altval'))
-
- def test_delete_updates_index(self):
- doc = self.db.create_doc_from_json(simple_doc)
- doc2 = self.db.create_doc_from_json(simple_doc)
- self.db.create_index('test-idx', 'key')
- self.assertEqual(
- sorted([doc, doc2]),
- sorted(self.db.get_from_index('test-idx', 'value')))
- self.db.delete_doc(doc)
- self.assertEqual([doc2], self.db.get_from_index('test-idx', 'value'))
-
- def test_get_from_index_illegal_number_of_entries(self):
- self.db.create_index('test-idx', 'k1', 'k2')
- self.assertRaises(
- errors.InvalidValueForIndex, self.db.get_from_index, 'test-idx')
- self.assertRaises(
- errors.InvalidValueForIndex,
- self.db.get_from_index, 'test-idx', 'v1')
- self.assertRaises(
- errors.InvalidValueForIndex,
- self.db.get_from_index, 'test-idx', 'v1', 'v2', 'v3')
-
- def test_get_from_index_illegal_wildcard_order(self):
- self.db.create_index('test-idx', 'k1', 'k2')
- self.assertRaises(
- errors.InvalidGlobbing,
- self.db.get_from_index, 'test-idx', '*', 'v2')
-
- def test_get_from_index_illegal_glob_after_wildcard(self):
- self.db.create_index('test-idx', 'k1', 'k2')
- self.assertRaises(
- errors.InvalidGlobbing,
- self.db.get_from_index, 'test-idx', '*', 'v*')
-
- def test_get_all_from_index(self):
- self.db.create_index('test-idx', 'key')
- doc1 = self.db.create_doc_from_json(simple_doc)
- doc2 = self.db.create_doc_from_json(nested_doc)
- # This one should not be in the index
- self.db.create_doc_from_json('{"no": "key"}')
- diff_value_doc = '{"key": "diff value"}'
- doc4 = self.db.create_doc_from_json(diff_value_doc)
- # This is essentially a 'prefix' match, but we match every entry.
- self.assertEqual(
- sorted([doc1, doc2, doc4]),
- sorted(self.db.get_from_index('test-idx', '*')))
-
- def test_get_all_from_index_ordered(self):
- self.db.create_index('test-idx', 'key')
- doc1 = self.db.create_doc_from_json('{"key": "value x"}')
- doc2 = self.db.create_doc_from_json('{"key": "value b"}')
- doc3 = self.db.create_doc_from_json('{"key": "value a"}')
- doc4 = self.db.create_doc_from_json('{"key": "value m"}')
- # This is essentially a 'prefix' match, but we match every entry.
- self.assertEqual(
- [doc3, doc2, doc4, doc1], self.db.get_from_index('test-idx', '*'))
-
- def test_put_updates_when_adding_key(self):
- doc = self.db.create_doc_from_json("{}")
- self.db.create_index('test-idx', 'key')
- self.assertEqual([], self.db.get_from_index('test-idx', '*'))
- doc.set_json(simple_doc)
- self.db.put_doc(doc)
- self.assertEqual([doc], self.db.get_from_index('test-idx', '*'))
-
- def test_get_from_index_empty_string(self):
- self.db.create_index('test-idx', 'key')
- doc1 = self.db.create_doc_from_json(simple_doc)
- content2 = '{"key": ""}'
- doc2 = self.db.create_doc_from_json(content2)
- self.assertEqual([doc2], self.db.get_from_index('test-idx', ''))
- # Empty string matches the wildcard.
- self.assertEqual(
- sorted([doc1, doc2]),
- sorted(self.db.get_from_index('test-idx', '*')))
-
- def test_get_from_index_not_null(self):
- self.db.create_index('test-idx', 'key')
- doc1 = self.db.create_doc_from_json(simple_doc)
- self.db.create_doc_from_json('{"key": null}')
- self.assertEqual([doc1], self.db.get_from_index('test-idx', '*'))
-
- def test_get_partial_from_index(self):
- content1 = '{"k1": "v1", "k2": "v2"}'
- content2 = '{"k1": "v1", "k2": "x2"}'
- content3 = '{"k1": "v1", "k2": "y2"}'
- # doc4 has a different k1 value, so it doesn't match the prefix.
- content4 = '{"k1": "NN", "k2": "v2"}'
- doc1 = self.db.create_doc_from_json(content1)
- doc2 = self.db.create_doc_from_json(content2)
- doc3 = self.db.create_doc_from_json(content3)
- self.db.create_doc_from_json(content4)
- self.db.create_index('test-idx', 'k1', 'k2')
- self.assertEqual(
- sorted([doc1, doc2, doc3]),
- sorted(self.db.get_from_index('test-idx', "v1", "*")))
-
- def test_get_glob_match(self):
- # Note: the exact glob syntax is probably subject to change
- content1 = '{"k1": "v1", "k2": "v1"}'
- content2 = '{"k1": "v1", "k2": "v2"}'
- content3 = '{"k1": "v1", "k2": "v3"}'
- # doc4 has a different k2 prefix value, so it doesn't match
- content4 = '{"k1": "v1", "k2": "ZZ"}'
- self.db.create_index('test-idx', 'k1', 'k2')
- doc1 = self.db.create_doc_from_json(content1)
- doc2 = self.db.create_doc_from_json(content2)
- doc3 = self.db.create_doc_from_json(content3)
- self.db.create_doc_from_json(content4)
- self.assertEqual(
- sorted([doc1, doc2, doc3]),
- sorted(self.db.get_from_index('test-idx', "v1", "v*")))
-
- def test_nested_index(self):
- doc = self.db.create_doc_from_json(nested_doc)
- self.db.create_index('test-idx', 'sub.doc')
- self.assertEqual(
- [doc], self.db.get_from_index('test-idx', 'underneath'))
- doc2 = self.db.create_doc_from_json(nested_doc)
- self.assertEqual(
- sorted([doc, doc2]),
- sorted(self.db.get_from_index('test-idx', 'underneath')))
-
- def test_nested_nonexistent(self):
- self.db.create_doc_from_json(nested_doc)
- # sub exists, but sub.foo does not:
- self.db.create_index('test-idx', 'sub.foo')
- self.assertEqual([], self.db.get_from_index('test-idx', '*'))
-
- def test_nested_nonexistent2(self):
- self.db.create_doc_from_json(nested_doc)
- self.db.create_index('test-idx', 'sub.foo.bar.baz.qux.fnord')
- self.assertEqual([], self.db.get_from_index('test-idx', '*'))
-
- def test_nested_traverses_lists(self):
- # subpath finds dicts in list
- doc = self.db.create_doc_from_json(
- '{"foo": [{"zap": "bar"}, {"zap": "baz"}]}')
- # subpath only finds dicts in list
- self.db.create_doc_from_json('{"foo": ["zap", "baz"]}')
- self.db.create_index('test-idx', 'foo.zap')
- self.assertEqual([doc], self.db.get_from_index('test-idx', 'bar'))
- self.assertEqual([doc], self.db.get_from_index('test-idx', 'baz'))
-
- def test_nested_list_traversal(self):
- # subpath finds dicts in list
- doc = self.db.create_doc_from_json(
- '{"foo": [{"zap": [{"qux": "fnord"}, {"qux": "zombo"}]},'
- '{"zap": "baz"}]}')
- # subpath only finds dicts in list
- self.db.create_index('test-idx', 'foo.zap.qux')
- self.assertEqual([doc], self.db.get_from_index('test-idx', 'fnord'))
- self.assertEqual([doc], self.db.get_from_index('test-idx', 'zombo'))
-
- def test_index_list1(self):
- self.db.create_index("index", "name")
- content = '{"name": ["foo", "bar"]}'
- doc = self.db.create_doc_from_json(content)
- rows = self.db.get_from_index("index", "bar")
- self.assertEqual([doc], rows)
-
- def test_index_list2(self):
- self.db.create_index("index", "name")
- content = '{"name": ["foo", "bar"]}'
- doc = self.db.create_doc_from_json(content)
- rows = self.db.get_from_index("index", "foo")
- self.assertEqual([doc], rows)
-
- def test_get_from_index_case_sensitive(self):
- self.db.create_index('test-idx', 'key')
- doc1 = self.db.create_doc_from_json(simple_doc)
- self.assertEqual([], self.db.get_from_index('test-idx', 'V*'))
- self.assertEqual([doc1], self.db.get_from_index('test-idx', 'v*'))
-
- def test_get_from_index_illegal_glob_before_value(self):
- self.db.create_index('test-idx', 'k1', 'k2')
- self.assertRaises(
- errors.InvalidGlobbing,
- self.db.get_from_index, 'test-idx', 'v*', 'v2')
-
- def test_get_from_index_illegal_glob_after_glob(self):
- self.db.create_index('test-idx', 'k1', 'k2')
- self.assertRaises(
- errors.InvalidGlobbing,
- self.db.get_from_index, 'test-idx', 'v*', 'v*')
-
- def test_get_from_index_with_sql_wildcards(self):
- self.db.create_index('test-idx', 'key')
- content1 = '{"key": "va%lue"}'
- content2 = '{"key": "value"}'
- content3 = '{"key": "va_lue"}'
- doc1 = self.db.create_doc_from_json(content1)
- self.db.create_doc_from_json(content2)
- doc3 = self.db.create_doc_from_json(content3)
- # The '%' in the search should be treated literally, not as a SQL
- # LIKE wildcard.
- self.assertEqual([doc1], self.db.get_from_index('test-idx', 'va%*'))
- # Same for '_'
- self.assertEqual([doc3], self.db.get_from_index('test-idx', 'va_*'))
-
- def test_get_from_index_with_lower(self):
- self.db.create_index("index", "lower(name)")
- content = '{"name": "Foo"}'
- doc = self.db.create_doc_from_json(content)
- rows = self.db.get_from_index("index", "foo")
- self.assertEqual([doc], rows)
-
- def test_get_from_index_with_lower_matches_same_case(self):
- self.db.create_index("index", "lower(name)")
- content = '{"name": "foo"}'
- doc = self.db.create_doc_from_json(content)
- rows = self.db.get_from_index("index", "foo")
- self.assertEqual([doc], rows)
-
- def test_index_lower_doesnt_match_different_case(self):
- self.db.create_index("index", "lower(name)")
- content = '{"name": "Foo"}'
- self.db.create_doc_from_json(content)
- rows = self.db.get_from_index("index", "Foo")
- self.assertEqual([], rows)
-
- def test_index_lower_doesnt_match_other_index(self):
- self.db.create_index("index", "lower(name)")
- self.db.create_index("other_index", "name")
- content = '{"name": "Foo"}'
- self.db.create_doc_from_json(content)
- rows = self.db.get_from_index("index", "Foo")
- self.assertEqual(0, len(rows))
-
- def test_index_split_words_match_first(self):
- self.db.create_index("index", "split_words(name)")
- content = '{"name": "foo bar"}'
- doc = self.db.create_doc_from_json(content)
- rows = self.db.get_from_index("index", "foo")
- self.assertEqual([doc], rows)
-
- def test_index_split_words_match_second(self):
- self.db.create_index("index", "split_words(name)")
- content = '{"name": "foo bar"}'
- doc = self.db.create_doc_from_json(content)
- rows = self.db.get_from_index("index", "bar")
- self.assertEqual([doc], rows)
-
- def test_index_split_words_match_both(self):
- self.db.create_index("index", "split_words(name)")
- content = '{"name": "foo foo"}'
- doc = self.db.create_doc_from_json(content)
- rows = self.db.get_from_index("index", "foo")
- self.assertEqual([doc], rows)
-
- def test_index_split_words_double_space(self):
- self.db.create_index("index", "split_words(name)")
- content = '{"name": "foo bar"}'
- doc = self.db.create_doc_from_json(content)
- rows = self.db.get_from_index("index", "bar")
- self.assertEqual([doc], rows)
-
- def test_index_split_words_leading_space(self):
- self.db.create_index("index", "split_words(name)")
- content = '{"name": " foo bar"}'
- doc = self.db.create_doc_from_json(content)
- rows = self.db.get_from_index("index", "foo")
- self.assertEqual([doc], rows)
-
- def test_index_split_words_trailing_space(self):
- self.db.create_index("index", "split_words(name)")
- content = '{"name": "foo bar "}'
- doc = self.db.create_doc_from_json(content)
- rows = self.db.get_from_index("index", "bar")
- self.assertEqual([doc], rows)
-
- def test_get_from_index_with_number(self):
- self.db.create_index("index", "number(foo, 5)")
- content = '{"foo": 12}'
- doc = self.db.create_doc_from_json(content)
- rows = self.db.get_from_index("index", "00012")
- self.assertEqual([doc], rows)
-
- def test_get_from_index_with_number_bigger_than_padding(self):
- self.db.create_index("index", "number(foo, 5)")
- content = '{"foo": 123456}'
- doc = self.db.create_doc_from_json(content)
- rows = self.db.get_from_index("index", "123456")
- self.assertEqual([doc], rows)
-
- def test_number_mapping_ignores_non_numbers(self):
- self.db.create_index("index", "number(foo, 5)")
- content = '{"foo": 56}'
- doc1 = self.db.create_doc_from_json(content)
- content = '{"foo": "this is not a maigret painting"}'
- self.db.create_doc_from_json(content)
- rows = self.db.get_from_index("index", "*")
- self.assertEqual([doc1], rows)
-
- def test_get_from_index_with_bool(self):
- self.db.create_index("index", "bool(foo)")
- content = '{"foo": true}'
- doc = self.db.create_doc_from_json(content)
- rows = self.db.get_from_index("index", "1")
- self.assertEqual([doc], rows)
-
- def test_get_from_index_with_bool_false(self):
- self.db.create_index("index", "bool(foo)")
- content = '{"foo": false}'
- doc = self.db.create_doc_from_json(content)
- rows = self.db.get_from_index("index", "0")
- self.assertEqual([doc], rows)
-
- def test_get_from_index_with_non_bool(self):
- self.db.create_index("index", "bool(foo)")
- content = '{"foo": 42}'
- self.db.create_doc_from_json(content)
- rows = self.db.get_from_index("index", "*")
- self.assertEqual([], rows)
-
- def test_get_from_index_with_combine(self):
- self.db.create_index("index", "combine(foo, bar)")
- content = '{"foo": "value1", "bar": "value2"}'
- doc = self.db.create_doc_from_json(content)
- rows = self.db.get_from_index("index", "value1")
- self.assertEqual([doc], rows)
- rows = self.db.get_from_index("index", "value2")
- self.assertEqual([doc], rows)
-
- def test_get_complex_combine(self):
- self.db.create_index(
- "index", "combine(number(foo, 5), lower(bar), split_words(baz))")
- content = '{"foo": 12, "bar": "ALLCAPS", "baz": "qux nox"}'
- doc = self.db.create_doc_from_json(content)
- content = '{"foo": "not a number", "bar": "something"}'
- doc2 = self.db.create_doc_from_json(content)
- rows = self.db.get_from_index("index", "00012")
- self.assertEqual([doc], rows)
- rows = self.db.get_from_index("index", "allcaps")
- self.assertEqual([doc], rows)
- rows = self.db.get_from_index("index", "nox")
- self.assertEqual([doc], rows)
- rows = self.db.get_from_index("index", "something")
- self.assertEqual([doc2], rows)
-
- def test_get_index_keys_from_index(self):
- self.db.create_index('test-idx', 'key')
- content1 = '{"key": "value1"}'
- content2 = '{"key": "value2"}'
- content3 = '{"key": "value2"}'
- self.db.create_doc_from_json(content1)
- self.db.create_doc_from_json(content2)
- self.db.create_doc_from_json(content3)
- self.assertEqual(
- [('value1',), ('value2',)],
- sorted(self.db.get_index_keys('test-idx')))
-
- def test_get_index_keys_from_multicolumn_index(self):
- self.db.create_index('test-idx', 'key1', 'key2')
- content1 = '{"key1": "value1", "key2": "val2-1"}'
- content2 = '{"key1": "value2", "key2": "val2-2"}'
- content3 = '{"key1": "value2", "key2": "val2-2"}'
- content4 = '{"key1": "value2", "key2": "val3"}'
- self.db.create_doc_from_json(content1)
- self.db.create_doc_from_json(content2)
- self.db.create_doc_from_json(content3)
- self.db.create_doc_from_json(content4)
- self.assertEqual([
- ('value1', 'val2-1'),
- ('value2', 'val2-2'),
- ('value2', 'val3')],
- sorted(self.db.get_index_keys('test-idx')))
-
- def test_empty_expr(self):
- self.assertParseError('')
-
- def test_nested_unknown_operation(self):
- self.assertParseError('unknown_operation(field1)')
-
- def test_parse_missing_close_paren(self):
- self.assertParseError("lower(a")
-
- def test_parse_trailing_close_paren(self):
- self.assertParseError("lower(ab))")
-
- def test_parse_trailing_chars(self):
- self.assertParseError("lower(ab)adsf")
-
- def test_parse_empty_op(self):
- self.assertParseError("(ab)")
-
- def test_parse_top_level_commas(self):
- self.assertParseError("a, b")
-
- def test_invalid_field_name(self):
- self.assertParseError("a.")
-
- def test_invalid_inner_field_name(self):
- self.assertParseError("lower(a.)")
-
- def test_gobbledigook(self):
- self.assertParseError("(@#@cc @#!*DFJSXV(()jccd")
-
- def test_leading_space(self):
- self.assertIndexCreatable(" lower(a)")
-
- def test_trailing_space(self):
- self.assertIndexCreatable("lower(a) ")
-
- def test_spaces_before_open_paren(self):
- self.assertIndexCreatable("lower (a)")
-
- def test_spaces_after_open_paren(self):
- self.assertIndexCreatable("lower( a)")
-
- def test_spaces_before_close_paren(self):
- self.assertIndexCreatable("lower(a )")
-
- def test_spaces_before_comma(self):
- self.assertIndexCreatable("combine(a , b , c)")
-
- def test_spaces_after_comma(self):
- self.assertIndexCreatable("combine(a, b, c)")
-
- def test_all_together_now(self):
- self.assertParseError(' (a) ')
-
- def test_all_together_now2(self):
- self.assertParseError('combine(lower(x)x,foo)')
-
-
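
Taken together, the index tests above pin down u1db's expression grammar
(lower(), split_words(), number(), bool(), combine()) and its wildcard rules.
What follows is a minimal sketch of that API, assuming the reference u1db
implementation; u1db.open() and the ':memory:' path are assumptions, while the
create_index()/get_from_index() calls mirror the tests themselves.

    import u1db
    from u1db import errors

    # assumed entry point; the tests above use backend-specific fixtures instead
    db = u1db.open(':memory:', create=True)

    # Transformation expressions, as exercised above.
    db.create_index('by-name', 'lower(name)')
    db.create_index('by-tag', 'split_words(tags)')
    db.create_index('by-size', 'number(size, 5)')

    doc = db.create_doc_from_json(
        '{"name": "Foo", "tags": "red big", "size": 12}')

    assert db.get_from_index('by-name', 'foo') == [doc]    # lower() folds case
    assert db.get_from_index('by-tag', 'big') == [doc]     # one entry per word
    assert db.get_from_index('by-size', '00012') == [doc]  # zero-padded to 5 digits

    # Wildcards: '*' alone matches every entry and 'v*' is a prefix glob, but
    # globs must come last -- a plain value after a glob raises InvalidGlobbing.
    db.create_index('two-col', 'k1', 'k2')
    try:
        db.get_from_index('two-col', 'v*', 'v2')
    except errors.InvalidGlobbing:
        pass
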
-class PythonBackendTests(tests.DatabaseBaseTests):
-
- def setUp(self):
- super(PythonBackendTests, self).setUp()
- self.simple_doc = json.loads(simple_doc)
-
- def test_create_doc_with_factory(self):
- self.db.set_document_factory(TestAlternativeDocument)
- doc = self.db.create_doc(self.simple_doc, doc_id='my_doc_id')
- self.assertTrue(isinstance(doc, TestAlternativeDocument))
-
- def test_get_doc_after_put_with_factory(self):
- doc = self.db.create_doc(self.simple_doc, doc_id='my_doc_id')
- self.db.set_document_factory(TestAlternativeDocument)
- result = self.db.get_doc('my_doc_id')
- self.assertTrue(isinstance(result, TestAlternativeDocument))
- self.assertEqual(doc.doc_id, result.doc_id)
- self.assertEqual(doc.rev, result.rev)
- self.assertEqual(doc.get_json(), result.get_json())
- self.assertEqual(False, result.has_conflicts)
-
- def test_get_doc_nonexisting_with_factory(self):
- self.db.set_document_factory(TestAlternativeDocument)
- self.assertIs(None, self.db.get_doc('non-existing'))
-
- def test_get_all_docs_with_factory(self):
- self.db.set_document_factory(TestAlternativeDocument)
- self.db.create_doc(self.simple_doc)
- self.assertTrue(isinstance(
- list(self.db.get_all_docs()[1])[0], TestAlternativeDocument))
-
- def test_get_docs_conflicted_with_factory(self):
- self.db.set_document_factory(TestAlternativeDocument)
- doc1 = self.db.create_doc(self.simple_doc)
- doc2 = self.make_document(doc1.doc_id, 'alternate:1', nested_doc)
- self.db._put_doc_if_newer(
- doc2, save_conflict=True, replica_uid='r', replica_gen=1,
- replica_trans_id='foo')
- self.assertTrue(
- isinstance(
- list(self.db.get_docs([doc1.doc_id]))[0],
- TestAlternativeDocument))
-
- def test_get_from_index_with_factory(self):
- self.db.set_document_factory(TestAlternativeDocument)
- self.db.create_doc(self.simple_doc)
- self.db.create_index('test-idx', 'key')
- self.assertTrue(
- isinstance(
- self.db.get_from_index('test-idx', 'value')[0],
- TestAlternativeDocument))
-
- def test_sync_exchange_updates_indexes(self):
- doc = self.db.create_doc(self.simple_doc)
- self.db.create_index('test-idx', 'key')
- new_content = '{"key": "altval"}'
- other_rev = 'test:1|z:2'
- st = self.db.get_sync_target()
-
- def ignore(doc_id, doc_rev, doc):
- pass
-
- doc_other = self.make_document(doc.doc_id, other_rev, new_content)
- docs_by_gen = [(doc_other, 10, 'T-sid')]
- st.sync_exchange(
- docs_by_gen, 'other-replica', last_known_generation=0,
- last_known_trans_id=None, return_doc_cb=ignore)
- self.assertGetDoc(self.db, doc.doc_id, other_rev, new_content, False)
- self.assertEqual(
- [doc_other], self.db.get_from_index('test-idx', 'altval'))
- self.assertEqual([], self.db.get_from_index('test-idx', 'value'))
-
-
-# Use a custom loader to apply the scenarios at load time.
-load_tests = tests.load_with_scenarios
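
PythonBackendTests above revolves around the document-factory hook: once a
factory is registered, every API that returns documents returns instances of
that class. Below is a minimal sketch, assuming the reference u1db
implementation; subclassing u1db.Document and the TaggedDocument name are
illustrative assumptions, while set_document_factory() and create_doc() are
the calls exercised above.

    import u1db

    class TaggedDocument(u1db.Document):
        """Hypothetical application-specific document class."""

        def tag(self):
            return (self.content or {}).get('tag')

    db = u1db.open(':memory:', create=True)   # assumed entry point
    db.set_document_factory(TaggedDocument)

    doc = db.create_doc({'tag': 'example'}, doc_id='my_doc_id')
    assert isinstance(doc, TaggedDocument)
    assert isinstance(db.get_doc('my_doc_id'), TaggedDocument)
    assert db.get_doc('my_doc_id').tag() == 'example'
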
diff --git a/src/leap/soledad/tests/u1db_tests/test_document.py b/src/leap/soledad/tests/u1db_tests/test_document.py
deleted file mode 100644
index e706e1a9..00000000
--- a/src/leap/soledad/tests/u1db_tests/test_document.py
+++ /dev/null
@@ -1,150 +0,0 @@
-# Copyright 2011 Canonical Ltd.
-#
-# This file is part of u1db.
-#
-# u1db is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License version 3
-# as published by the Free Software Foundation.
-#
-# u1db is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with u1db. If not, see <http://www.gnu.org/licenses/>.
-
-
-from u1db import errors
-
-from leap.soledad.tests import u1db_tests as tests
-
-
-class TestDocument(tests.TestCase):
-
- scenarios = ([(
- 'py', {'make_document_for_test': tests.make_document_for_test})]) # +
- #tests.C_DATABASE_SCENARIOS)
-
- def test_create_doc(self):
- doc = self.make_document('doc-id', 'uid:1', tests.simple_doc)
- self.assertEqual('doc-id', doc.doc_id)
- self.assertEqual('uid:1', doc.rev)
- self.assertEqual(tests.simple_doc, doc.get_json())
- self.assertFalse(doc.has_conflicts)
-
- def test__repr__(self):
- doc = self.make_document('doc-id', 'uid:1', tests.simple_doc)
- self.assertEqual(
- '%s(doc-id, uid:1, \'{"key": "value"}\')'
- % (doc.__class__.__name__,),
- repr(doc))
-
- def test__repr__conflicted(self):
- doc = self.make_document('doc-id', 'uid:1', tests.simple_doc,
- has_conflicts=True)
- self.assertEqual(
- '%s(doc-id, uid:1, conflicted, \'{"key": "value"}\')'
- % (doc.__class__.__name__,),
- repr(doc))
-
- def test__lt__(self):
- doc_a = self.make_document('a', 'b', '{}')
- doc_b = self.make_document('b', 'b', '{}')
- self.assertTrue(doc_a < doc_b)
- self.assertTrue(doc_b > doc_a)
- doc_aa = self.make_document('a', 'a', '{}')
- self.assertTrue(doc_aa < doc_a)
-
- def test__eq__(self):
- doc_a = self.make_document('a', 'b', '{}')
- doc_b = self.make_document('a', 'b', '{}')
- self.assertTrue(doc_a == doc_b)
- doc_b = self.make_document('a', 'b', '{}', has_conflicts=True)
- self.assertFalse(doc_a == doc_b)
-
- def test_non_json_dict(self):
- self.assertRaises(
- errors.InvalidJSON, self.make_document, 'id', 'uid:1',
- '"not a json dictionary"')
-
- def test_non_json(self):
- self.assertRaises(
- errors.InvalidJSON, self.make_document, 'id', 'uid:1',
- 'not a json dictionary')
-
- def test_get_size(self):
- doc_a = self.make_document('a', 'b', '{"some": "content"}')
- self.assertEqual(
- len('a' + 'b' + '{"some": "content"}'), doc_a.get_size())
-
- def test_get_size_empty_document(self):
- doc_a = self.make_document('a', 'b', None)
- self.assertEqual(len('a' + 'b'), doc_a.get_size())
-
-
-class TestPyDocument(tests.TestCase):
-
- scenarios = ([(
- 'py', {'make_document_for_test': tests.make_document_for_test})])
-
- def test_get_content(self):
- doc = self.make_document('id', 'rev', '{"content":""}')
- self.assertEqual({"content": ""}, doc.content)
- doc.set_json('{"content": "new"}')
- self.assertEqual({"content": "new"}, doc.content)
-
- def test_set_content(self):
- doc = self.make_document('id', 'rev', '{"content":""}')
- doc.content = {"content": "new"}
- self.assertEqual('{"content": "new"}', doc.get_json())
-
- def test_set_bad_content(self):
- doc = self.make_document('id', 'rev', '{"content":""}')
- self.assertRaises(
- errors.InvalidContent, setattr, doc, 'content',
- '{"content": "new"}')
-
- def test_is_tombstone(self):
- doc_a = self.make_document('a', 'b', '{}')
- self.assertFalse(doc_a.is_tombstone())
- doc_a.set_json(None)
- self.assertTrue(doc_a.is_tombstone())
-
- def test_make_tombstone(self):
- doc_a = self.make_document('a', 'b', '{}')
- self.assertFalse(doc_a.is_tombstone())
- doc_a.make_tombstone()
- self.assertTrue(doc_a.is_tombstone())
-
- def test_same_content_as(self):
- doc_a = self.make_document('a', 'b', '{}')
- doc_b = self.make_document('d', 'e', '{}')
- self.assertTrue(doc_a.same_content_as(doc_b))
- doc_b = self.make_document('p', 'q', '{}', has_conflicts=True)
- self.assertTrue(doc_a.same_content_as(doc_b))
- doc_b.content['key'] = 'value'
- self.assertFalse(doc_a.same_content_as(doc_b))
-
- def test_same_content_as_json_order(self):
- doc_a = self.make_document(
- 'a', 'b', '{"key1": "val1", "key2": "val2"}')
- doc_b = self.make_document(
- 'c', 'd', '{"key2": "val2", "key1": "val1"}')
- self.assertTrue(doc_a.same_content_as(doc_b))
-
- def test_set_json(self):
- doc = self.make_document('id', 'rev', '{"content":""}')
- doc.set_json('{"content": "new"}')
- self.assertEqual('{"content": "new"}', doc.get_json())
-
- def test_set_json_non_dict(self):
- doc = self.make_document('id', 'rev', '{"content":""}')
- self.assertRaises(errors.InvalidJSON, doc.set_json, '"is not a dict"')
-
- def test_set_json_error(self):
- doc = self.make_document('id', 'rev', '{"content":""}')
- self.assertRaises(errors.InvalidJSON, doc.set_json, 'is not json')
-
-
-load_tests = tests.load_with_scenarios
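
The document tests above reduce to a small surface: JSON text in, a parsed
content dict out, and tombstones standing in for deleted documents. A minimal
sketch of that surface, assuming u1db.Document can be constructed directly the
way the make_document helper does; the attribute and method names are exactly
the ones tested above.

    from u1db import Document

    # constructed directly, as the tests' make_document helper does
    doc = Document('doc-id', 'rev:1', '{"key": "value"}')
    assert doc.content == {"key": "value"}   # parsed view of the JSON payload

    doc.content = {"key": "new"}             # assigning a dict re-serialises it
    assert doc.get_json() == '{"key": "new"}'

    doc.make_tombstone()                     # deletion keeps doc_id and rev
    assert doc.is_tombstone()
    assert doc.get_json() is None
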
diff --git a/src/leap/soledad/tests/u1db_tests/test_http_app.py b/src/leap/soledad/tests/u1db_tests/test_http_app.py
deleted file mode 100644
index e0729aa2..00000000
--- a/src/leap/soledad/tests/u1db_tests/test_http_app.py
+++ /dev/null
@@ -1,1135 +0,0 @@
-# Copyright 2011-2012 Canonical Ltd.
-#
-# This file is part of u1db.
-#
-# u1db is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License version 3
-# as published by the Free Software Foundation.
-#
-# u1db is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with u1db. If not, see <http://www.gnu.org/licenses/>.
-
-"""Test the WSGI app."""
-
-import paste.fixture
-import sys
-try:
- import simplejson as json
-except ImportError:
- import json # noqa
-import StringIO
-
-from u1db import (
- __version__ as _u1db_version,
- errors,
- sync,
-)
-
-from leap.soledad.tests import u1db_tests as tests
-
-from u1db.remote import (
- http_app,
- http_errors,
-)
-
-
-class TestFencedReader(tests.TestCase):
-
- def test_init(self):
- reader = http_app._FencedReader(StringIO.StringIO(""), 25, 100)
- self.assertEqual(25, reader.remaining)
-
- def test_read_chunk(self):
- inp = StringIO.StringIO("abcdef")
- reader = http_app._FencedReader(inp, 5, 10)
- data = reader.read_chunk(2)
- self.assertEqual("ab", data)
- self.assertEqual(2, inp.tell())
- self.assertEqual(3, reader.remaining)
-
- def test_read_chunk_remaining(self):
- inp = StringIO.StringIO("abcdef")
- reader = http_app._FencedReader(inp, 4, 10)
- data = reader.read_chunk(9999)
- self.assertEqual("abcd", data)
- self.assertEqual(4, inp.tell())
- self.assertEqual(0, reader.remaining)
-
- def test_read_chunk_nothing_left(self):
- inp = StringIO.StringIO("abc")
- reader = http_app._FencedReader(inp, 2, 10)
- reader.read_chunk(2)
- self.assertEqual(2, inp.tell())
- self.assertEqual(0, reader.remaining)
- data = reader.read_chunk(2)
- self.assertEqual("", data)
- self.assertEqual(2, inp.tell())
- self.assertEqual(0, reader.remaining)
-
- def test_read_chunk_kept(self):
- inp = StringIO.StringIO("abcde")
- reader = http_app._FencedReader(inp, 4, 10)
- reader._kept = "xyz"
- data = reader.read_chunk(2) # the atmost argument is ignored when kept data is pending
- self.assertEqual("xyz", data)
- self.assertEqual(0, inp.tell())
- self.assertEqual(4, reader.remaining)
- self.assertIsNone(reader._kept)
-
- def test_getline(self):
- inp = StringIO.StringIO("abc\r\nde")
- reader = http_app._FencedReader(inp, 6, 10)
- reader.MAXCHUNK = 6
- line = reader.getline()
- self.assertEqual("abc\r\n", line)
- self.assertEqual("d", reader._kept)
-
- def test_getline_exact(self):
- inp = StringIO.StringIO("abcd\r\nef")
- reader = http_app._FencedReader(inp, 6, 10)
- reader.MAXCHUNK = 6
- line = reader.getline()
- self.assertEqual("abcd\r\n", line)
- self.assertIs(None, reader._kept)
-
- def test_getline_no_newline(self):
- inp = StringIO.StringIO("abcd")
- reader = http_app._FencedReader(inp, 4, 10)
- reader.MAXCHUNK = 6
- line = reader.getline()
- self.assertEqual("abcd", line)
-
- def test_getline_many_chunks(self):
- inp = StringIO.StringIO("abcde\r\nf")
- reader = http_app._FencedReader(inp, 8, 10)
- reader.MAXCHUNK = 4
- line = reader.getline()
- self.assertEqual("abcde\r\n", line)
- self.assertEqual("f", reader._kept)
- line = reader.getline()
- self.assertEqual("f", line)
-
- def test_getline_empty(self):
- inp = StringIO.StringIO("")
- reader = http_app._FencedReader(inp, 0, 10)
- reader.MAXCHUNK = 4
- line = reader.getline()
- self.assertEqual("", line)
- line = reader.getline()
- self.assertEqual("", line)
-
- def test_getline_just_newline(self):
- inp = StringIO.StringIO("\r\n")
- reader = http_app._FencedReader(inp, 2, 10)
- reader.MAXCHUNK = 4
- line = reader.getline()
- self.assertEqual("\r\n", line)
- line = reader.getline()
- self.assertEqual("", line)
-
- def test_getline_too_large(self):
- inp = StringIO.StringIO("x" * 50)
- reader = http_app._FencedReader(inp, 50, 25)
- reader.MAXCHUNK = 4
- self.assertRaises(http_app.BadRequest, reader.getline)
-
- def test_getline_too_large_complete(self):
- inp = StringIO.StringIO("x" * 25 + "\r\n")
- reader = http_app._FencedReader(inp, 50, 25)
- reader.MAXCHUNK = 4
- self.assertRaises(http_app.BadRequest, reader.getline)
-
-
-class TestHTTPMethodDecorator(tests.TestCase):
-
- def test_args(self):
- @http_app.http_method()
- def f(self, a, b):
- return self, a, b
- res = f("self", {"a": "x", "b": "y"}, None)
- self.assertEqual(("self", "x", "y"), res)
-
- def test_args_missing(self):
- @http_app.http_method()
- def f(self, a, b):
- return a, b
- self.assertRaises(http_app.BadRequest, f, "self", {"a": "x"}, None)
-
- def test_args_unexpected(self):
- @http_app.http_method()
- def f(self, a):
- return a
- self.assertRaises(http_app.BadRequest, f, "self",
- {"a": "x", "c": "z"}, None)
-
- def test_args_default(self):
- @http_app.http_method()
- def f(self, a, b="z"):
- return a, b
- res = f("self", {"a": "x"}, None)
- self.assertEqual(("x", "z"), res)
-
- def test_args_conversion(self):
- @http_app.http_method(b=int)
- def f(self, a, b):
- return self, a, b
- res = f("self", {"a": "x", "b": "2"}, None)
- self.assertEqual(("self", "x", 2), res)
-
- self.assertRaises(http_app.BadRequest, f, "self",
- {"a": "x", "b": "foo"}, None)
-
- def test_args_conversion_with_default(self):
- @http_app.http_method(b=str)
- def f(self, a, b=None):
- return self, a, b
- res = f("self", {"a": "x"}, None)
- self.assertEqual(("self", "x", None), res)
-
- def test_args_content(self):
- @http_app.http_method()
- def f(self, a, content):
- return a, content
- res = f(self, {"a": "x"}, "CONTENT")
- self.assertEqual(("x", "CONTENT"), res)
-
- def test_args_content_as_args(self):
- @http_app.http_method(b=int, content_as_args=True)
- def f(self, a, b):
- return self, a, b
- res = f("self", {"a": "x"}, '{"b": "2"}')
- self.assertEqual(("self", "x", 2), res)
-
- self.assertRaises(http_app.BadRequest, f, "self", {}, 'not-json')
-
- def test_args_content_no_query(self):
- @http_app.http_method(no_query=True,
- content_as_args=True)
- def f(self, a='a', b='b'):
- return a, b
- res = f("self", {}, '{"b": "y"}')
- self.assertEqual(('a', 'y'), res)
-
- self.assertRaises(http_app.BadRequest, f, "self", {'a': 'x'},
- '{"b": "y"}')
-
-
-class TestResource(object):
-
- @http_app.http_method()
- def get(self, a, b):
- self.args = dict(a=a, b=b)
- return 'Get'
-
- @http_app.http_method()
- def put(self, a, content):
- self.args = dict(a=a)
- self.content = content
- return 'Put'
-
- @http_app.http_method(content_as_args=True)
- def put_args(self, a, b):
- self.args = dict(a=a, b=b)
- self.order = ['a']
- self.entries = []
-
- @http_app.http_method()
- def put_stream_entry(self, content):
- self.entries.append(content)
- self.order.append('s')
-
- def put_end(self):
- self.order.append('e')
- return "Put/end"
-
-
-class parameters:
- max_request_size = 200000
- max_entry_size = 100000
-
-
-class TestHTTPInvocationByMethodWithBody(tests.TestCase):
-
- def test_get(self):
- resource = TestResource()
- environ = {'QUERY_STRING': 'a=1&b=2', 'REQUEST_METHOD': 'GET'}
- invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ,
- parameters)
- res = invoke()
- self.assertEqual('Get', res)
- self.assertEqual({'a': '1', 'b': '2'}, resource.args)
-
- def test_put_json(self):
- resource = TestResource()
- body = '{"body": true}'
- environ = {'QUERY_STRING': 'a=1', 'REQUEST_METHOD': 'PUT',
- 'wsgi.input': StringIO.StringIO(body),
- 'CONTENT_LENGTH': str(len(body)),
- 'CONTENT_TYPE': 'application/json'}
- invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ,
- parameters)
- res = invoke()
- self.assertEqual('Put', res)
- self.assertEqual({'a': '1'}, resource.args)
- self.assertEqual('{"body": true}', resource.content)
-
- def test_put_sync_stream(self):
- resource = TestResource()
- body = (
- '[\r\n'
- '{"b": 2},\r\n' # args
- '{"entry": "x"},\r\n' # stream entry
- '{"entry": "y"}\r\n' # stream entry
- ']'
- )
- environ = {'QUERY_STRING': 'a=1', 'REQUEST_METHOD': 'PUT',
- 'wsgi.input': StringIO.StringIO(body),
- 'CONTENT_LENGTH': str(len(body)),
- 'CONTENT_TYPE': 'application/x-u1db-sync-stream'}
- invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ,
- parameters)
- res = invoke()
- self.assertEqual('Put/end', res)
- self.assertEqual({'a': '1', 'b': 2}, resource.args)
- self.assertEqual(
- ['{"entry": "x"}', '{"entry": "y"}'], resource.entries)
- self.assertEqual(['a', 's', 's', 'e'], resource.order)
-
- def _put_sync_stream(self, body):
- resource = TestResource()
- environ = {'QUERY_STRING': 'a=1&b=2', 'REQUEST_METHOD': 'PUT',
- 'wsgi.input': StringIO.StringIO(body),
- 'CONTENT_LENGTH': str(len(body)),
- 'CONTENT_TYPE': 'application/x-u1db-sync-stream'}
- invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ,
- parameters)
- invoke()
-
- def test_put_sync_stream_wrong_start(self):
- self.assertRaises(http_app.BadRequest,
- self._put_sync_stream, "{}\r\n]")
-
- self.assertRaises(http_app.BadRequest,
- self._put_sync_stream, "\r\n{}\r\n]")
-
- self.assertRaises(http_app.BadRequest,
- self._put_sync_stream, "")
-
- def test_put_sync_stream_wrong_end(self):
- self.assertRaises(http_app.BadRequest,
- self._put_sync_stream, "[\r\n{}")
-
- self.assertRaises(http_app.BadRequest,
- self._put_sync_stream, "[\r\n")
-
- self.assertRaises(http_app.BadRequest,
- self._put_sync_stream, "[\r\n{}\r\n]\r\n...")
-
- def test_put_sync_stream_missing_comma(self):
- self.assertRaises(http_app.BadRequest,
- self._put_sync_stream, "[\r\n{}\r\n{}\r\n]")
-
- def test_put_sync_stream_extra_comma(self):
- self.assertRaises(http_app.BadRequest,
- self._put_sync_stream, "[\r\n{},\r\n]")
-
- self.assertRaises(http_app.BadRequest,
- self._put_sync_stream, "[\r\n{},\r\n{},\r\n]")
-
- def test_bad_request_decode_failure(self):
- resource = TestResource()
- environ = {'QUERY_STRING': 'a=\xff', 'REQUEST_METHOD': 'PUT',
- 'wsgi.input': StringIO.StringIO('{}'),
- 'CONTENT_LENGTH': '2',
- 'CONTENT_TYPE': 'application/json'}
- invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ,
- parameters)
- self.assertRaises(http_app.BadRequest, invoke)
-
- def test_bad_request_unsupported_content_type(self):
- resource = TestResource()
- environ = {'QUERY_STRING': '', 'REQUEST_METHOD': 'PUT',
- 'wsgi.input': StringIO.StringIO('{}'),
- 'CONTENT_LENGTH': '2',
- 'CONTENT_TYPE': 'text/plain'}
- invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ,
- parameters)
- self.assertRaises(http_app.BadRequest, invoke)
-
- def test_bad_request_content_length_too_large(self):
- resource = TestResource()
- environ = {'QUERY_STRING': '', 'REQUEST_METHOD': 'PUT',
- 'wsgi.input': StringIO.StringIO('{}'),
- 'CONTENT_LENGTH': '10000',
- 'CONTENT_TYPE': 'text/plain'}
-
- resource.max_request_size = 5000
- resource.max_entry_size = sys.maxint # we don't get to use this
-
- invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ,
- parameters)
- self.assertRaises(http_app.BadRequest, invoke)
-
- def test_bad_request_no_content_length(self):
- resource = TestResource()
- environ = {'QUERY_STRING': '', 'REQUEST_METHOD': 'PUT',
- 'wsgi.input': StringIO.StringIO('a'),
- 'CONTENT_TYPE': 'application/json'}
- invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ,
- parameters)
- self.assertRaises(http_app.BadRequest, invoke)
-
- def test_bad_request_invalid_content_length(self):
- resource = TestResource()
- environ = {'QUERY_STRING': '', 'REQUEST_METHOD': 'PUT',
- 'wsgi.input': StringIO.StringIO('abc'),
- 'CONTENT_LENGTH': '1unk',
- 'CONTENT_TYPE': 'application/json'}
- invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ,
- parameters)
- self.assertRaises(http_app.BadRequest, invoke)
-
- def test_bad_request_empty_body(self):
- resource = TestResource()
- environ = {'QUERY_STRING': '', 'REQUEST_METHOD': 'PUT',
- 'wsgi.input': StringIO.StringIO(''),
- 'CONTENT_LENGTH': '0',
- 'CONTENT_TYPE': 'application/json'}
- invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ,
- parameters)
- self.assertRaises(http_app.BadRequest, invoke)
-
- def test_bad_request_unsupported_method_get_like(self):
- resource = TestResource()
- environ = {'QUERY_STRING': '', 'REQUEST_METHOD': 'DELETE'}
- invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ,
- parameters)
- self.assertRaises(http_app.BadRequest, invoke)
-
- def test_bad_request_unsupported_method_put_like(self):
- resource = TestResource()
- environ = {'QUERY_STRING': '', 'REQUEST_METHOD': 'PUT',
- 'wsgi.input': StringIO.StringIO('{}'),
- 'CONTENT_LENGTH': '2',
- 'CONTENT_TYPE': 'application/json'}
- invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ,
- parameters)
- self.assertRaises(http_app.BadRequest, invoke)
-
- def test_bad_request_unsupported_method_put_like_multi_json(self):
- resource = TestResource()
- body = '{}\r\n{}\r\n'
- environ = {'QUERY_STRING': '', 'REQUEST_METHOD': 'POST',
- 'wsgi.input': StringIO.StringIO(body),
- 'CONTENT_LENGTH': str(len(body)),
- 'CONTENT_TYPE': 'application/x-u1db-multi-json'}
- invoke = http_app.HTTPInvocationByMethodWithBody(resource, environ,
- parameters)
- self.assertRaises(http_app.BadRequest, invoke)
-
-
-class TestHTTPResponder(tests.TestCase):
-
- def start_response(self, status, headers):
- self.status = status
- self.headers = dict(headers)
- self.response_body = []
-
- def write(data):
- self.response_body.append(data)
-
- return write
-
- def test_send_response_content_w_headers(self):
- responder = http_app.HTTPResponder(self.start_response)
- responder.send_response_content('foo', headers={'x-a': '1'})
- self.assertEqual('200 OK', self.status)
- self.assertEqual({'content-type': 'application/json',
- 'cache-control': 'no-cache',
- 'x-a': '1', 'content-length': '3'}, self.headers)
- self.assertEqual([], self.response_body)
- self.assertEqual(['foo'], responder.content)
-
- def test_send_response_json(self):
- responder = http_app.HTTPResponder(self.start_response)
- responder.send_response_json(value='success')
- self.assertEqual('200 OK', self.status)
- expected_body = '{"value": "success"}\r\n'
- self.assertEqual({'content-type': 'application/json',
- 'content-length': str(len(expected_body)),
- 'cache-control': 'no-cache'}, self.headers)
- self.assertEqual([], self.response_body)
- self.assertEqual([expected_body], responder.content)
-
- def test_send_response_json_status_fail(self):
- responder = http_app.HTTPResponder(self.start_response)
- responder.send_response_json(400)
- self.assertEqual('400 Bad Request', self.status)
- expected_body = '{}\r\n'
- self.assertEqual({'content-type': 'application/json',
- 'content-length': str(len(expected_body)),
- 'cache-control': 'no-cache'}, self.headers)
- self.assertEqual([], self.response_body)
- self.assertEqual([expected_body], responder.content)
-
- def test_start_finish_response_status_fail(self):
- responder = http_app.HTTPResponder(self.start_response)
- responder.start_response(404, {'error': 'not found'})
- responder.finish_response()
- self.assertEqual('404 Not Found', self.status)
- self.assertEqual({'content-type': 'application/json',
- 'cache-control': 'no-cache'}, self.headers)
- self.assertEqual(['{"error": "not found"}\r\n'], self.response_body)
- self.assertEqual([], responder.content)
-
- def test_send_stream_entry(self):
- responder = http_app.HTTPResponder(self.start_response)
- responder.content_type = "application/x-u1db-multi-json"
- responder.start_response(200)
- responder.start_stream()
- responder.stream_entry({'entry': 1})
- responder.stream_entry({'entry': 2})
- responder.end_stream()
- responder.finish_response()
- self.assertEqual('200 OK', self.status)
- self.assertEqual({'content-type': 'application/x-u1db-multi-json',
- 'cache-control': 'no-cache'}, self.headers)
- self.assertEqual(['[',
- '\r\n', '{"entry": 1}',
- ',\r\n', '{"entry": 2}',
- '\r\n]\r\n'], self.response_body)
- self.assertEqual([], responder.content)
-
- def test_send_stream_w_error(self):
- responder = http_app.HTTPResponder(self.start_response)
- responder.content_type = "application/x-u1db-multi-json"
- responder.start_response(200)
- responder.start_stream()
- responder.stream_entry({'entry': 1})
- responder.send_response_json(503, error="unavailable")
- self.assertEqual('200 OK', self.status)
- self.assertEqual({'content-type': 'application/x-u1db-multi-json',
- 'cache-control': 'no-cache'}, self.headers)
- self.assertEqual(['[',
- '\r\n', '{"entry": 1}'], self.response_body)
- self.assertEqual([',\r\n', '{"error": "unavailable"}\r\n'],
- responder.content)
-
-
-class TestHTTPApp(tests.TestCase):
-
- def setUp(self):
- super(TestHTTPApp, self).setUp()
- self.state = tests.ServerStateForTests()
- self.http_app = http_app.HTTPApp(self.state)
- self.app = paste.fixture.TestApp(self.http_app)
- self.db0 = self.state._create_database('db0')
-
- def test_bad_request_broken(self):
- resp = self.app.put('/db0/doc/doc1', params='{"x": 1}',
- headers={'content-type': 'application/foo'},
- expect_errors=True)
- self.assertEqual(400, resp.status)
-
- def test_bad_request_dispatch(self):
- resp = self.app.put('/db0/foo/doc1', params='{"x": 1}',
- headers={'content-type': 'application/json'},
- expect_errors=True)
- self.assertEqual(400, resp.status)
-
- def test_version(self):
- resp = self.app.get('/')
- self.assertEqual(200, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual({"version": _u1db_version}, json.loads(resp.body))
-
- def test_create_database(self):
- resp = self.app.put('/db1', params='{}',
- headers={'content-type': 'application/json'})
- self.assertEqual(200, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual({'ok': True}, json.loads(resp.body))
-
- resp = self.app.put('/db1', params='{}',
- headers={'content-type': 'application/json'})
- self.assertEqual(200, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual({'ok': True}, json.loads(resp.body))
-
- def test_delete_database(self):
- resp = self.app.delete('/db0')
- self.assertEqual(200, resp.status)
- self.assertRaises(errors.DatabaseDoesNotExist,
- self.state.check_database, 'db0')
-
- def test_get_database(self):
- resp = self.app.get('/db0')
- self.assertEqual(200, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual({}, json.loads(resp.body))
-
- def test_valid_database_names(self):
- resp = self.app.get('/a-database', expect_errors=True)
- self.assertEqual(404, resp.status)
-
- resp = self.app.get('/db1', expect_errors=True)
- self.assertEqual(404, resp.status)
-
- resp = self.app.get('/0', expect_errors=True)
- self.assertEqual(404, resp.status)
-
- resp = self.app.get('/0-0', expect_errors=True)
- self.assertEqual(404, resp.status)
-
- resp = self.app.get('/org.future', expect_errors=True)
- self.assertEqual(404, resp.status)
-
- def test_invalid_database_names(self):
- resp = self.app.get('/.a', expect_errors=True)
- self.assertEqual(400, resp.status)
-
- resp = self.app.get('/-a', expect_errors=True)
- self.assertEqual(400, resp.status)
-
- resp = self.app.get('/_a', expect_errors=True)
- self.assertEqual(400, resp.status)
-
- def test_put_doc_create(self):
- resp = self.app.put('/db0/doc/doc1', params='{"x": 1}',
- headers={'content-type': 'application/json'})
- doc = self.db0.get_doc('doc1')
- self.assertEqual(201, resp.status) # created
- self.assertEqual('{"x": 1}', doc.get_json())
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual({'rev': doc.rev}, json.loads(resp.body))
-
- def test_put_doc(self):
- doc = self.db0.create_doc_from_json('{"x": 1}', doc_id='doc1')
- resp = self.app.put('/db0/doc/doc1?old_rev=%s' % doc.rev,
- params='{"x": 2}',
- headers={'content-type': 'application/json'})
- doc = self.db0.get_doc('doc1')
- self.assertEqual(200, resp.status)
- self.assertEqual('{"x": 2}', doc.get_json())
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual({'rev': doc.rev}, json.loads(resp.body))
-
- def test_put_doc_too_large(self):
- self.http_app.max_request_size = 15000
- doc = self.db0.create_doc_from_json('{"x": 1}', doc_id='doc1')
- resp = self.app.put('/db0/doc/doc1?old_rev=%s' % doc.rev,
- params='{"%s": 2}' % ('z' * 16000),
- headers={'content-type': 'application/json'},
- expect_errors=True)
- self.assertEqual(400, resp.status)
-
- def test_delete_doc(self):
- doc = self.db0.create_doc_from_json('{"x": 1}', doc_id='doc1')
- resp = self.app.delete('/db0/doc/doc1?old_rev=%s' % doc.rev)
- doc = self.db0.get_doc('doc1', include_deleted=True)
- self.assertEqual(None, doc.content)
- self.assertEqual(200, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual({'rev': doc.rev}, json.loads(resp.body))
-
- def test_get_doc(self):
- doc = self.db0.create_doc_from_json('{"x": 1}', doc_id='doc1')
- resp = self.app.get('/db0/doc/%s' % doc.doc_id)
- self.assertEqual(200, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual('{"x": 1}', resp.body)
- self.assertEqual(doc.rev, resp.header('x-u1db-rev'))
- self.assertEqual('false', resp.header('x-u1db-has-conflicts'))
-
- def test_get_doc_non_existing(self):
- resp = self.app.get('/db0/doc/not-there', expect_errors=True)
- self.assertEqual(404, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual(
- {"error": "document does not exist"}, json.loads(resp.body))
- self.assertEqual('', resp.header('x-u1db-rev'))
- self.assertEqual('false', resp.header('x-u1db-has-conflicts'))
-
- def test_get_doc_deleted(self):
- doc = self.db0.create_doc_from_json('{"x": 1}', doc_id='doc1')
- self.db0.delete_doc(doc)
- resp = self.app.get('/db0/doc/doc1', expect_errors=True)
- self.assertEqual(404, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual(
- {"error": errors.DocumentDoesNotExist.wire_description},
- json.loads(resp.body))
-
- def test_get_doc_deleted_explicit_exclude(self):
- doc = self.db0.create_doc_from_json('{"x": 1}', doc_id='doc1')
- self.db0.delete_doc(doc)
- resp = self.app.get(
- '/db0/doc/doc1?include_deleted=false', expect_errors=True)
- self.assertEqual(404, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual(
- {"error": errors.DocumentDoesNotExist.wire_description},
- json.loads(resp.body))
-
- def test_get_deleted_doc(self):
- doc = self.db0.create_doc_from_json('{"x": 1}', doc_id='doc1')
- self.db0.delete_doc(doc)
- resp = self.app.get(
- '/db0/doc/doc1?include_deleted=true', expect_errors=True)
- self.assertEqual(404, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual(
- {"error": errors.DOCUMENT_DELETED}, json.loads(resp.body))
- self.assertEqual(doc.rev, resp.header('x-u1db-rev'))
- self.assertEqual('false', resp.header('x-u1db-has-conflicts'))
-
- def test_get_doc_non_existing_database(self):
- resp = self.app.get('/not-there/doc/doc1', expect_errors=True)
- self.assertEqual(404, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual(
- {"error": "database does not exist"}, json.loads(resp.body))
-
- def test_get_docs(self):
- doc1 = self.db0.create_doc_from_json('{"x": 1}', doc_id='doc1')
- doc2 = self.db0.create_doc_from_json('{"x": 1}', doc_id='doc2')
- ids = ','.join([doc1.doc_id, doc2.doc_id])
- resp = self.app.get('/db0/docs?doc_ids=%s' % ids)
- self.assertEqual(200, resp.status)
- self.assertEqual(
- 'application/json', resp.header('content-type'))
- expected = [
- {"content": '{"x": 1}', "doc_rev": "db0:1", "doc_id": "doc1",
- "has_conflicts": False},
- {"content": '{"x": 1}', "doc_rev": "db0:1", "doc_id": "doc2",
- "has_conflicts": False}]
- self.assertEqual(expected, json.loads(resp.body))
-
- def test_get_docs_missing_doc_ids(self):
- resp = self.app.get('/db0/docs', expect_errors=True)
- self.assertEqual(400, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual(
- {"error": "missing document ids"}, json.loads(resp.body))
-
- def test_get_docs_empty_doc_ids(self):
- resp = self.app.get('/db0/docs?doc_ids=', expect_errors=True)
- self.assertEqual(400, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual(
- {"error": "missing document ids"}, json.loads(resp.body))
-
- def test_get_docs_percent(self):
- doc1 = self.db0.create_doc_from_json('{"x": 1}', doc_id='doc%1')
- doc2 = self.db0.create_doc_from_json('{"x": 1}', doc_id='doc2')
- ids = ','.join([doc1.doc_id, doc2.doc_id])
- resp = self.app.get('/db0/docs?doc_ids=%s' % ids)
- self.assertEqual(200, resp.status)
- self.assertEqual(
- 'application/json', resp.header('content-type'))
- expected = [
- {"content": '{"x": 1}', "doc_rev": "db0:1", "doc_id": "doc%1",
- "has_conflicts": False},
- {"content": '{"x": 1}', "doc_rev": "db0:1", "doc_id": "doc2",
- "has_conflicts": False}]
- self.assertEqual(expected, json.loads(resp.body))
-
- def test_get_docs_deleted(self):
- doc1 = self.db0.create_doc_from_json('{"x": 1}', doc_id='doc1')
- doc2 = self.db0.create_doc_from_json('{"x": 1}', doc_id='doc2')
- self.db0.delete_doc(doc2)
- ids = ','.join([doc1.doc_id, doc2.doc_id])
- resp = self.app.get('/db0/docs?doc_ids=%s' % ids)
- self.assertEqual(200, resp.status)
- self.assertEqual(
- 'application/json', resp.header('content-type'))
- expected = [
- {"content": '{"x": 1}', "doc_rev": "db0:1", "doc_id": "doc1",
- "has_conflicts": False}]
- self.assertEqual(expected, json.loads(resp.body))
-
- def test_get_docs_include_deleted(self):
- doc1 = self.db0.create_doc_from_json('{"x": 1}', doc_id='doc1')
- doc2 = self.db0.create_doc_from_json('{"x": 1}', doc_id='doc2')
- self.db0.delete_doc(doc2)
- ids = ','.join([doc1.doc_id, doc2.doc_id])
- resp = self.app.get('/db0/docs?doc_ids=%s&include_deleted=true' % ids)
- self.assertEqual(200, resp.status)
- self.assertEqual(
- 'application/json', resp.header('content-type'))
- expected = [
- {"content": '{"x": 1}', "doc_rev": "db0:1", "doc_id": "doc1",
- "has_conflicts": False},
- {"content": None, "doc_rev": "db0:2", "doc_id": "doc2",
- "has_conflicts": False}]
- self.assertEqual(expected, json.loads(resp.body))
-
- def test_get_sync_info(self):
- self.db0._set_replica_gen_and_trans_id('other-id', 1, 'T-transid')
- resp = self.app.get('/db0/sync-from/other-id')
- self.assertEqual(200, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual(dict(target_replica_uid='db0',
- target_replica_generation=0,
- target_replica_transaction_id='',
- source_replica_uid='other-id',
- source_replica_generation=1,
- source_transaction_id='T-transid'),
- json.loads(resp.body))
-
- def test_record_sync_info(self):
- resp = self.app.put('/db0/sync-from/other-id',
- params='{"generation": 2, "transaction_id": '
- '"T-transid"}',
- headers={'content-type': 'application/json'})
- self.assertEqual(200, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual({'ok': True}, json.loads(resp.body))
- self.assertEqual(
- (2, 'T-transid'),
- self.db0._get_replica_gen_and_trans_id('other-id'))
-
- def test_sync_exchange_send(self):
- entries = {
- 10: {'id': 'doc-here', 'rev': 'replica:1', 'content':
- '{"value": "here"}', 'gen': 10, 'trans_id': 'T-sid'},
- 11: {'id': 'doc-here2', 'rev': 'replica:1', 'content':
- '{"value": "here2"}', 'gen': 11, 'trans_id': 'T-sed'}
- }
-
- gens = []
- _do_set_replica_gen_and_trans_id = \
- self.db0._do_set_replica_gen_and_trans_id
-
- def set_sync_generation_witness(other_uid, other_gen, other_trans_id):
- gens.append((other_uid, other_gen))
- _do_set_replica_gen_and_trans_id(
- other_uid, other_gen, other_trans_id)
- self.assertGetDoc(self.db0, entries[other_gen]['id'],
- entries[other_gen]['rev'],
- entries[other_gen]['content'], False)
-
- self.patch(
- self.db0, '_do_set_replica_gen_and_trans_id',
- set_sync_generation_witness)
-
- args = dict(last_known_generation=0)
- body = ("[\r\n" +
- "%s,\r\n" % json.dumps(args) +
- "%s,\r\n" % json.dumps(entries[10]) +
- "%s\r\n" % json.dumps(entries[11]) +
- "]\r\n")
- resp = self.app.post('/db0/sync-from/replica',
- params=body,
- headers={'content-type':
- 'application/x-u1db-sync-stream'})
- self.assertEqual(200, resp.status)
- self.assertEqual('application/x-u1db-sync-stream',
- resp.header('content-type'))
- bits = resp.body.split('\r\n')
- self.assertEqual('[', bits[0])
- last_trans_id = self.db0._get_transaction_log()[-1][1]
- self.assertEqual({'new_generation': 2,
- 'new_transaction_id': last_trans_id},
- json.loads(bits[1]))
- self.assertEqual(']', bits[2])
- self.assertEqual('', bits[3])
- self.assertEqual([('replica', 10), ('replica', 11)], gens)
-
- def test_sync_exchange_send_ensure(self):
- entries = {
- 10: {'id': 'doc-here', 'rev': 'replica:1', 'content':
- '{"value": "here"}', 'gen': 10, 'trans_id': 'T-sid'},
- 11: {'id': 'doc-here2', 'rev': 'replica:1', 'content':
- '{"value": "here2"}', 'gen': 11, 'trans_id': 'T-sed'}
- }
-
- args = dict(last_known_generation=0, ensure=True)
- body = ("[\r\n" +
- "%s,\r\n" % json.dumps(args) +
- "%s,\r\n" % json.dumps(entries[10]) +
- "%s\r\n" % json.dumps(entries[11]) +
- "]\r\n")
- resp = self.app.post('/dbnew/sync-from/replica',
- params=body,
- headers={'content-type':
- 'application/x-u1db-sync-stream'})
- self.assertEqual(200, resp.status)
- self.assertEqual('application/x-u1db-sync-stream',
- resp.header('content-type'))
- bits = resp.body.split('\r\n')
- self.assertEqual('[', bits[0])
- dbnew = self.state.open_database("dbnew")
- last_trans_id = dbnew._get_transaction_log()[-1][1]
- self.assertEqual({'new_generation': 2,
- 'new_transaction_id': last_trans_id,
- 'replica_uid': dbnew._replica_uid},
- json.loads(bits[1]))
- self.assertEqual(']', bits[2])
- self.assertEqual('', bits[3])
-
- def test_sync_exchange_send_entry_too_large(self):
- self.patch(http_app.SyncResource, 'max_request_size', 20000)
- self.patch(http_app.SyncResource, 'max_entry_size', 10000)
- entries = {
- 10: {'id': 'doc-here', 'rev': 'replica:1', 'content':
- '{"value": "%s"}' % ('H' * 11000), 'gen': 10},
- }
- args = dict(last_known_generation=0)
- body = ("[\r\n" +
- "%s,\r\n" % json.dumps(args) +
- "%s\r\n" % json.dumps(entries[10]) +
- "]\r\n")
- resp = self.app.post('/db0/sync-from/replica',
- params=body,
- headers={'content-type':
- 'application/x-u1db-sync-stream'},
- expect_errors=True)
- self.assertEqual(400, resp.status)
-
- def test_sync_exchange_receive(self):
- doc = self.db0.create_doc_from_json('{"value": "there"}')
- doc2 = self.db0.create_doc_from_json('{"value": "there2"}')
- args = dict(last_known_generation=0)
- body = "[\r\n%s\r\n]" % json.dumps(args)
- resp = self.app.post('/db0/sync-from/replica',
- params=body,
- headers={'content-type':
- 'application/x-u1db-sync-stream'})
- self.assertEqual(200, resp.status)
- self.assertEqual('application/x-u1db-sync-stream',
- resp.header('content-type'))
- parts = resp.body.splitlines()
- self.assertEqual(5, len(parts))
- self.assertEqual('[', parts[0])
- last_trans_id = self.db0._get_transaction_log()[-1][1]
- self.assertEqual({'new_generation': 2,
- 'new_transaction_id': last_trans_id},
- json.loads(parts[1].rstrip(",")))
- part2 = json.loads(parts[2].rstrip(","))
- self.assertTrue(part2['trans_id'].startswith('T-'))
- self.assertEqual('{"value": "there"}', part2['content'])
- self.assertEqual(doc.rev, part2['rev'])
- self.assertEqual(doc.doc_id, part2['id'])
- self.assertEqual(1, part2['gen'])
- part3 = json.loads(parts[3].rstrip(","))
- self.assertTrue(part3['trans_id'].startswith('T-'))
- self.assertEqual('{"value": "there2"}', part3['content'])
- self.assertEqual(doc2.rev, part3['rev'])
- self.assertEqual(doc2.doc_id, part3['id'])
- self.assertEqual(2, part3['gen'])
- self.assertEqual(']', parts[4])
-
- def test_sync_exchange_error_in_stream(self):
- args = dict(last_known_generation=0)
- body = "[\r\n%s\r\n]" % json.dumps(args)
-
- def boom(self, return_doc_cb):
- raise errors.Unavailable
-
- self.patch(sync.SyncExchange, 'return_docs',
- boom)
- resp = self.app.post('/db0/sync-from/replica',
- params=body,
- headers={'content-type':
- 'application/x-u1db-sync-stream'})
- self.assertEqual(200, resp.status)
- self.assertEqual('application/x-u1db-sync-stream',
- resp.header('content-type'))
- parts = resp.body.splitlines()
- self.assertEqual(3, len(parts))
- self.assertEqual('[', parts[0])
- self.assertEqual({'new_generation': 0, 'new_transaction_id': ''},
- json.loads(parts[1].rstrip(",")))
- self.assertEqual({'error': 'unavailable'}, json.loads(parts[2]))
-
-
-class TestRequestHooks(tests.TestCase):
-
- def setUp(self):
- super(TestRequestHooks, self).setUp()
- self.state = tests.ServerStateForTests()
- self.http_app = http_app.HTTPApp(self.state)
- self.app = paste.fixture.TestApp(self.http_app)
- self.db0 = self.state._create_database('db0')
-
- def test_begin_and_done(self):
- calls = []
-
- def begin(environ):
- self.assertTrue('PATH_INFO' in environ)
- calls.append('begin')
-
- def done(environ):
- self.assertTrue('PATH_INFO' in environ)
- calls.append('done')
-
- self.http_app.request_begin = begin
- self.http_app.request_done = done
-
- doc = self.db0.create_doc_from_json('{"x": 1}', doc_id='doc1')
- self.app.get('/db0/doc/%s' % doc.doc_id)
-
- self.assertEqual(['begin', 'done'], calls)
-
- def test_bad_request(self):
- calls = []
-
- def begin(environ):
- self.assertTrue('PATH_INFO' in environ)
- calls.append('begin')
-
- def bad_request(environ):
- self.assertTrue('PATH_INFO' in environ)
- calls.append('bad-request')
-
- self.http_app.request_begin = begin
- self.http_app.request_bad_request = bad_request
- # shouldn't be called
- self.http_app.request_done = lambda env: 1 / 0
-
- resp = self.app.put('/db0/foo/doc1', params='{"x": 1}',
- headers={'content-type': 'application/json'},
- expect_errors=True)
- self.assertEqual(400, resp.status)
- self.assertEqual(['begin', 'bad-request'], calls)
-
-
-class TestHTTPErrors(tests.TestCase):
-
- def test_wire_description_to_status(self):
- self.assertNotIn("error", http_errors.wire_description_to_status)
-
-
-class TestHTTPAppErrorHandling(tests.TestCase):
-
- def setUp(self):
- super(TestHTTPAppErrorHandling, self).setUp()
- self.exc = None
- self.state = tests.ServerStateForTests()
-
- class ErroringResource(object):
-
- def post(_, args, content):
- raise self.exc
-
- def lookup_resource(environ, responder):
- return ErroringResource()
-
- self.http_app = http_app.HTTPApp(self.state)
- self.http_app._lookup_resource = lookup_resource
- self.app = paste.fixture.TestApp(self.http_app)
-
- def test_RevisionConflict_etc(self):
- self.exc = errors.RevisionConflict()
- resp = self.app.post('/req', params='{}',
- headers={'content-type': 'application/json'},
- expect_errors=True)
- self.assertEqual(409, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual({"error": "revision conflict"},
- json.loads(resp.body))
-
- def test_Unavailable(self):
- self.exc = errors.Unavailable
- resp = self.app.post('/req', params='{}',
- headers={'content-type': 'application/json'},
- expect_errors=True)
- self.assertEqual(503, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual({"error": "unavailable"},
- json.loads(resp.body))
-
- def test_generic_u1db_errors(self):
- self.exc = errors.U1DBError()
- resp = self.app.post('/req', params='{}',
- headers={'content-type': 'application/json'},
- expect_errors=True)
- self.assertEqual(500, resp.status)
- self.assertEqual('application/json', resp.header('content-type'))
- self.assertEqual({"error": "error"},
- json.loads(resp.body))
-
- def test_generic_u1db_errors_hooks(self):
- calls = []
-
- def begin(environ):
- self.assertTrue('PATH_INFO' in environ)
- calls.append('begin')
-
- def u1db_error(environ, exc):
- self.assertTrue('PATH_INFO' in environ)
- calls.append(('error', exc))
-
- self.http_app.request_begin = begin
- self.http_app.request_u1db_error = u1db_error
- # shouldn't be called
- self.http_app.request_done = lambda env: 1 / 0
-
- self.exc = errors.U1DBError()
- resp = self.app.post('/req', params='{}',
- headers={'content-type': 'application/json'},
- expect_errors=True)
- self.assertEqual(500, resp.status)
- self.assertEqual(['begin', ('error', self.exc)], calls)
-
- def test_failure(self):
- class Failure(Exception):
- pass
- self.exc = Failure()
- self.assertRaises(Failure, self.app.post, '/req', params='{}',
- headers={'content-type': 'application/json'})
-
- def test_failure_hooks(self):
- class Failure(Exception):
- pass
- calls = []
-
- def begin(environ):
- calls.append('begin')
-
- def failed(environ):
- self.assertTrue('PATH_INFO' in environ)
- calls.append(('failed', sys.exc_info()))
-
- self.http_app.request_begin = begin
- self.http_app.request_failed = failed
- # shouldn't be called
- self.http_app.request_done = lambda env: 1 / 0
-
- self.exc = Failure()
- self.assertRaises(Failure, self.app.post, '/req', params='{}',
- headers={'content-type': 'application/json'})
-
- self.assertEqual(2, len(calls))
- self.assertEqual('begin', calls[0])
- marker, (exc_type, exc, tb) = calls[1]
- self.assertEqual('failed', marker)
- self.assertEqual(self.exc, exc)
-
-
-class TestPluggableSyncExchange(tests.TestCase):
-
- def setUp(self):
- super(TestPluggableSyncExchange, self).setUp()
- self.state = tests.ServerStateForTests()
- self.state.ensure_database('foo')
-
- def test_plugging(self):
-
- class MySyncExchange(object):
- def __init__(self, db, source_replica_uid, last_known_generation):
- pass
-
- class MySyncResource(http_app.SyncResource):
- sync_exchange_class = MySyncExchange
-
- sync_res = MySyncResource('foo', 'src', self.state, None)
- sync_res.post_args(
- {'last_known_generation': 0, 'last_known_trans_id': None}, '{}')
- self.assertIsInstance(sync_res.sync_exch, MySyncExchange)
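
The WSGI-level classes above (TestHTTPApp, TestRequestHooks,
TestHTTPAppErrorHandling) share the same wiring: a u1db HTTPApp wrapped in a
paste.fixture TestApp over an in-memory server state. A minimal sketch of that
wiring, lifted from the setUp() methods above; ServerStateForTests and
_create_database() are suite-internal helpers rather than public API, so this
is illustrative only.

    import paste.fixture
    from u1db.remote import http_app
    from leap.soledad.tests import u1db_tests as tests

    state = tests.ServerStateForTests()       # test-suite helper, not public API
    app = paste.fixture.TestApp(http_app.HTTPApp(state))
    state._create_database('db0')             # private helper, as in setUp()

    resp = app.put('/db0/doc/doc1', params='{"x": 1}',
                   headers={'content-type': 'application/json'})
    assert resp.status == 201   # created, as in test_put_doc_create above
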
diff --git a/src/leap/soledad/tests/u1db_tests/test_http_client.py b/src/leap/soledad/tests/u1db_tests/test_http_client.py
deleted file mode 100644
index 42e98461..00000000
--- a/src/leap/soledad/tests/u1db_tests/test_http_client.py
+++ /dev/null
@@ -1,363 +0,0 @@
-# Copyright 2011-2012 Canonical Ltd.
-#
-# This file is part of u1db.
-#
-# u1db is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License version 3
-# as published by the Free Software Foundation.
-#
-# u1db is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with u1db. If not, see <http://www.gnu.org/licenses/>.
-
-"""Tests for HTTPDatabase"""
-
-from oauth import oauth
-try:
- import simplejson as json
-except ImportError:
- import json # noqa
-
-from u1db import (
- errors,
-)
-
-from leap.soledad.tests import u1db_tests as tests
-
-from u1db.remote import (
- http_client,
-)
-
-
-class TestEncoder(tests.TestCase):
-
- def test_encode_string(self):
- self.assertEqual("foo", http_client._encode_query_parameter("foo"))
-
- def test_encode_true(self):
- self.assertEqual("true", http_client._encode_query_parameter(True))
-
- def test_encode_false(self):
- self.assertEqual("false", http_client._encode_query_parameter(False))
-
-
-class TestHTTPClientBase(tests.TestCaseWithServer):
-
- def setUp(self):
- super(TestHTTPClientBase, self).setUp()
- self.errors = 0
-
- def app(self, environ, start_response):
- if environ['PATH_INFO'].endswith('echo'):
- start_response("200 OK", [('Content-Type', 'application/json')])
- ret = {}
- for name in ('REQUEST_METHOD', 'PATH_INFO', 'QUERY_STRING'):
- ret[name] = environ[name]
- if environ['REQUEST_METHOD'] in ('PUT', 'POST'):
- ret['CONTENT_TYPE'] = environ['CONTENT_TYPE']
- content_length = int(environ['CONTENT_LENGTH'])
- ret['body'] = environ['wsgi.input'].read(content_length)
- return [json.dumps(ret)]
- elif environ['PATH_INFO'].endswith('error_then_accept'):
- if self.errors >= 3:
- start_response(
- "200 OK", [('Content-Type', 'application/json')])
- ret = {}
- for name in ('REQUEST_METHOD', 'PATH_INFO', 'QUERY_STRING'):
- ret[name] = environ[name]
- if environ['REQUEST_METHOD'] in ('PUT', 'POST'):
- ret['CONTENT_TYPE'] = environ['CONTENT_TYPE']
- content_length = int(environ['CONTENT_LENGTH'])
- ret['body'] = '{"oki": "doki"}'
- return [json.dumps(ret)]
- self.errors += 1
- content_length = int(environ['CONTENT_LENGTH'])
- error = json.loads(
- environ['wsgi.input'].read(content_length))
- response = error['response']
-            # In debug mode, wsgiref asserts that the status parameter is a
-            # 'str' object, but error['status'] is a unicode object, so it
-            # must be converted explicitly.
- status = str(error['status'])
- if isinstance(response, unicode):
- response = str(response)
- if isinstance(response, str):
- start_response(status, [('Content-Type', 'text/plain')])
- return [str(response)]
- else:
- start_response(status, [('Content-Type', 'application/json')])
- return [json.dumps(response)]
- elif environ['PATH_INFO'].endswith('error'):
- self.errors += 1
- content_length = int(environ['CONTENT_LENGTH'])
- error = json.loads(
- environ['wsgi.input'].read(content_length))
- response = error['response']
-            # In debug mode, wsgiref asserts that the status parameter is a
-            # 'str' object, but error['status'] is a unicode object, so it
-            # must be converted explicitly.
- status = str(error['status'])
- if isinstance(response, unicode):
- response = str(response)
- if isinstance(response, str):
- start_response(status, [('Content-Type', 'text/plain')])
- return [str(response)]
- else:
- start_response(status, [('Content-Type', 'application/json')])
- return [json.dumps(response)]
- elif '/oauth' in environ['PATH_INFO']:
- base_url = self.getURL('').rstrip('/')
- oauth_req = oauth.OAuthRequest.from_request(
- http_method=environ['REQUEST_METHOD'],
- http_url=base_url + environ['PATH_INFO'],
- headers={'Authorization': environ['HTTP_AUTHORIZATION']},
- query_string=environ['QUERY_STRING']
- )
- oauth_server = oauth.OAuthServer(tests.testingOAuthStore)
- oauth_server.add_signature_method(tests.sign_meth_HMAC_SHA1)
- try:
- consumer, token, params = oauth_server.verify_request(
- oauth_req)
- except oauth.OAuthError, e:
- start_response("401 Unauthorized",
- [('Content-Type', 'application/json')])
- return [json.dumps({"error": "unauthorized",
- "message": e.message})]
- start_response("200 OK", [('Content-Type', 'application/json')])
- return [json.dumps([environ['PATH_INFO'], token.key, params])]
-
- def make_app(self):
- return self.app
-
- def getClient(self, **kwds):
- self.startServer()
- return http_client.HTTPClientBase(self.getURL('dbase'), **kwds)
-
- def test_construct(self):
- self.startServer()
- url = self.getURL()
- cli = http_client.HTTPClientBase(url)
- self.assertEqual(url, cli._url.geturl())
- self.assertIs(None, cli._conn)
-
- def test_parse_url(self):
- cli = http_client.HTTPClientBase(
- '%s://127.0.0.1:12345/' % self.url_scheme)
- self.assertEqual(self.url_scheme, cli._url.scheme)
- self.assertEqual('127.0.0.1', cli._url.hostname)
- self.assertEqual(12345, cli._url.port)
- self.assertEqual('/', cli._url.path)
-
- def test__ensure_connection(self):
- cli = self.getClient()
- self.assertIs(None, cli._conn)
- cli._ensure_connection()
- self.assertIsNot(None, cli._conn)
- conn = cli._conn
- cli._ensure_connection()
- self.assertIs(conn, cli._conn)
-
- def test_close(self):
- cli = self.getClient()
- cli._ensure_connection()
- cli.close()
- self.assertIs(None, cli._conn)
-
- def test__request(self):
- cli = self.getClient()
- res, headers = cli._request('PUT', ['echo'], {}, {})
- self.assertEqual({'CONTENT_TYPE': 'application/json',
- 'PATH_INFO': '/dbase/echo',
- 'QUERY_STRING': '',
- 'body': '{}',
- 'REQUEST_METHOD': 'PUT'}, json.loads(res))
-
- res, headers = cli._request('GET', ['doc', 'echo'], {'a': 1})
- self.assertEqual({'PATH_INFO': '/dbase/doc/echo',
- 'QUERY_STRING': 'a=1',
- 'REQUEST_METHOD': 'GET'}, json.loads(res))
-
- res, headers = cli._request('GET', ['doc', '%FFFF', 'echo'], {'a': 1})
- self.assertEqual({'PATH_INFO': '/dbase/doc/%FFFF/echo',
- 'QUERY_STRING': 'a=1',
- 'REQUEST_METHOD': 'GET'}, json.loads(res))
-
- res, headers = cli._request('POST', ['echo'], {'b': 2}, 'Body',
- 'application/x-test')
- self.assertEqual({'CONTENT_TYPE': 'application/x-test',
- 'PATH_INFO': '/dbase/echo',
- 'QUERY_STRING': 'b=2',
- 'body': 'Body',
- 'REQUEST_METHOD': 'POST'}, json.loads(res))
-
- def test__request_json(self):
- cli = self.getClient()
- res, headers = cli._request_json(
- 'POST', ['echo'], {'b': 2}, {'a': 'x'})
- self.assertEqual('application/json', headers['content-type'])
- self.assertEqual({'CONTENT_TYPE': 'application/json',
- 'PATH_INFO': '/dbase/echo',
- 'QUERY_STRING': 'b=2',
- 'body': '{"a": "x"}',
- 'REQUEST_METHOD': 'POST'}, res)
-
- def test_unspecified_http_error(self):
- cli = self.getClient()
- self.assertRaises(errors.HTTPError,
- cli._request_json, 'POST', ['error'], {},
- {'status': "500 Internal Error",
- 'response': "Crash."})
- try:
- cli._request_json('POST', ['error'], {},
- {'status': "500 Internal Error",
- 'response': "Fail."})
- except errors.HTTPError, e:
- pass
-
- self.assertEqual(500, e.status)
- self.assertEqual("Fail.", e.message)
- self.assertTrue("content-type" in e.headers)
-
- def test_revision_conflict(self):
- cli = self.getClient()
- self.assertRaises(errors.RevisionConflict,
- cli._request_json, 'POST', ['error'], {},
- {'status': "409 Conflict",
- 'response': {"error": "revision conflict"}})
-
- def test_unavailable_proper(self):
- cli = self.getClient()
- cli._delays = (0, 0, 0, 0, 0)
- self.assertRaises(errors.Unavailable,
- cli._request_json, 'POST', ['error'], {},
- {'status': "503 Service Unavailable",
- 'response': {"error": "unavailable"}})
- self.assertEqual(5, self.errors)
-
- def test_unavailable_then_available(self):
- cli = self.getClient()
- cli._delays = (0, 0, 0, 0, 0)
- res, headers = cli._request_json(
- 'POST', ['error_then_accept'], {'b': 2},
- {'status': "503 Service Unavailable",
- 'response': {"error": "unavailable"}})
- self.assertEqual('application/json', headers['content-type'])
- self.assertEqual({'CONTENT_TYPE': 'application/json',
- 'PATH_INFO': '/dbase/error_then_accept',
- 'QUERY_STRING': 'b=2',
- 'body': '{"oki": "doki"}',
- 'REQUEST_METHOD': 'POST'}, res)
- self.assertEqual(3, self.errors)
-
- def test_unavailable_random_source(self):
- cli = self.getClient()
- cli._delays = (0, 0, 0, 0, 0)
- try:
- cli._request_json('POST', ['error'], {},
- {'status': "503 Service Unavailable",
- 'response': "random unavailable."})
- except errors.Unavailable, e:
- pass
-
- self.assertEqual(503, e.status)
- self.assertEqual("random unavailable.", e.message)
- self.assertTrue("content-type" in e.headers)
- self.assertEqual(5, self.errors)
-
- def test_document_too_big(self):
- cli = self.getClient()
- self.assertRaises(errors.DocumentTooBig,
- cli._request_json, 'POST', ['error'], {},
- {'status': "403 Forbidden",
- 'response': {"error": "document too big"}})
-
- def test_user_quota_exceeded(self):
- cli = self.getClient()
- self.assertRaises(errors.UserQuotaExceeded,
- cli._request_json, 'POST', ['error'], {},
- {'status': "403 Forbidden",
- 'response': {"error": "user quota exceeded"}})
-
- def test_user_needs_subscription(self):
- cli = self.getClient()
- self.assertRaises(errors.SubscriptionNeeded,
- cli._request_json, 'POST', ['error'], {},
- {'status': "403 Forbidden",
- 'response': {"error": "user needs subscription"}})
-
- def test_generic_u1db_error(self):
- cli = self.getClient()
- self.assertRaises(errors.U1DBError,
- cli._request_json, 'POST', ['error'], {},
- {'status': "400 Bad Request",
- 'response': {"error": "error"}})
- try:
- cli._request_json('POST', ['error'], {},
- {'status': "400 Bad Request",
- 'response': {"error": "error"}})
- except errors.U1DBError, e:
- pass
- self.assertIs(e.__class__, errors.U1DBError)
-
- def test_unspecified_bad_request(self):
- cli = self.getClient()
- self.assertRaises(errors.HTTPError,
- cli._request_json, 'POST', ['error'], {},
- {'status': "400 Bad Request",
- 'response': "<Bad Request>"})
- try:
- cli._request_json('POST', ['error'], {},
- {'status': "400 Bad Request",
- 'response': "<Bad Request>"})
- except errors.HTTPError, e:
- pass
-
- self.assertEqual(400, e.status)
- self.assertEqual("<Bad Request>", e.message)
- self.assertTrue("content-type" in e.headers)
-
- def test_oauth(self):
- cli = self.getClient()
- cli.set_oauth_credentials(tests.consumer1.key, tests.consumer1.secret,
- tests.token1.key, tests.token1.secret)
- params = {'x': u'\xf0', 'y': "foo"}
- res, headers = cli._request('GET', ['doc', 'oauth'], params)
- self.assertEqual(
- ['/dbase/doc/oauth', tests.token1.key, params], json.loads(res))
-
- # oauth does its own internal quoting
- params = {'x': u'\xf0', 'y': "foo"}
- res, headers = cli._request('GET', ['doc', 'oauth', 'foo bar'], params)
- self.assertEqual(
- ['/dbase/doc/oauth/foo bar', tests.token1.key, params],
- json.loads(res))
-
- def test_oauth_ctr_creds(self):
- cli = self.getClient(creds={'oauth': {
- 'consumer_key': tests.consumer1.key,
- 'consumer_secret': tests.consumer1.secret,
- 'token_key': tests.token1.key,
- 'token_secret': tests.token1.secret,
- }})
- params = {'x': u'\xf0', 'y': "foo"}
- res, headers = cli._request('GET', ['doc', 'oauth'], params)
- self.assertEqual(
- ['/dbase/doc/oauth', tests.token1.key, params], json.loads(res))
-
- def test_unknown_creds(self):
- self.assertRaises(errors.UnknownAuthMethod,
- self.getClient, creds={'foo': {}})
- self.assertRaises(errors.UnknownAuthMethod,
- self.getClient, creds={})
-
- def test_oauth_Unauthorized(self):
- cli = self.getClient()
- cli.set_oauth_credentials(tests.consumer1.key, tests.consumer1.secret,
- tests.token1.key, "WRONG")
- params = {'y': 'foo'}
- self.assertRaises(errors.Unauthorized, cli._request, 'GET',
- ['doc', 'oauth'], params)
diff --git a/src/leap/soledad/tests/u1db_tests/test_http_database.py b/src/leap/soledad/tests/u1db_tests/test_http_database.py
deleted file mode 100644
index f21e6da1..00000000
--- a/src/leap/soledad/tests/u1db_tests/test_http_database.py
+++ /dev/null
@@ -1,260 +0,0 @@
-# Copyright 2011 Canonical Ltd.
-#
-# This file is part of u1db.
-#
-# u1db is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License version 3
-# as published by the Free Software Foundation.
-#
-# u1db is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with u1db. If not, see <http://www.gnu.org/licenses/>.
-
-"""Tests for HTTPDatabase"""
-
-import inspect
-try:
- import simplejson as json
-except ImportError:
- import json # noqa
-
-from u1db import (
- errors,
- Document,
-)
-
-from leap.soledad.tests import u1db_tests as tests
-
-from u1db.remote import (
- http_database,
- http_target,
-)
-from leap.soledad.tests.u1db_tests.test_remote_sync_target import (
- make_http_app,
-)
-
-
-class TestHTTPDatabaseSimpleOperations(tests.TestCase):
-
- def setUp(self):
- super(TestHTTPDatabaseSimpleOperations, self).setUp()
- self.db = http_database.HTTPDatabase('dbase')
- self.db._conn = object() # crash if used
- self.got = None
- self.response_val = None
-
- def _request(method, url_parts, params=None, body=None,
- content_type=None):
- self.got = method, url_parts, params, body, content_type
- if isinstance(self.response_val, Exception):
- raise self.response_val
- return self.response_val
-
- def _request_json(method, url_parts, params=None, body=None,
- content_type=None):
- self.got = method, url_parts, params, body, content_type
- if isinstance(self.response_val, Exception):
- raise self.response_val
- return self.response_val
-
- self.db._request = _request
- self.db._request_json = _request_json
-
- def test__sanity_same_signature(self):
- my_request_sig = inspect.getargspec(self.db._request)
- my_request_sig = (['self'] + my_request_sig[0],) + my_request_sig[1:]
- self.assertEqual(
- my_request_sig,
- inspect.getargspec(http_database.HTTPDatabase._request))
- my_request_json_sig = inspect.getargspec(self.db._request_json)
- my_request_json_sig = ((['self'] + my_request_json_sig[0],) +
- my_request_json_sig[1:])
- self.assertEqual(
- my_request_json_sig,
- inspect.getargspec(http_database.HTTPDatabase._request_json))
-
- def test__ensure(self):
- self.response_val = {'ok': True}, {}
- self.db._ensure()
- self.assertEqual(('PUT', [], {}, {}, None), self.got)
-
- def test__delete(self):
- self.response_val = {'ok': True}, {}
- self.db._delete()
- self.assertEqual(('DELETE', [], {}, {}, None), self.got)
-
- def test__check(self):
- self.response_val = {}, {}
- res = self.db._check()
- self.assertEqual({}, res)
- self.assertEqual(('GET', [], None, None, None), self.got)
-
- def test_put_doc(self):
- self.response_val = {'rev': 'doc-rev'}, {}
- doc = Document('doc-id', None, '{"v": 1}')
- res = self.db.put_doc(doc)
- self.assertEqual('doc-rev', res)
- self.assertEqual('doc-rev', doc.rev)
- self.assertEqual(('PUT', ['doc', 'doc-id'], {},
- '{"v": 1}', 'application/json'), self.got)
-
- self.response_val = {'rev': 'doc-rev-2'}, {}
- doc.content = {"v": 2}
- res = self.db.put_doc(doc)
- self.assertEqual('doc-rev-2', res)
- self.assertEqual('doc-rev-2', doc.rev)
- self.assertEqual(('PUT', ['doc', 'doc-id'], {'old_rev': 'doc-rev'},
- '{"v": 2}', 'application/json'), self.got)
-
- def test_get_doc(self):
- self.response_val = '{"v": 2}', {'x-u1db-rev': 'doc-rev',
- 'x-u1db-has-conflicts': 'false'}
- self.assertGetDoc(self.db, 'doc-id', 'doc-rev', '{"v": 2}', False)
- self.assertEqual(
- ('GET', ['doc', 'doc-id'], {'include_deleted': False}, None, None),
- self.got)
-
- def test_get_doc_non_existing(self):
- self.response_val = errors.DocumentDoesNotExist()
- self.assertIs(None, self.db.get_doc('not-there'))
- self.assertEqual(
- ('GET', ['doc', 'not-there'], {'include_deleted': False}, None,
- None), self.got)
-
- def test_get_doc_deleted(self):
- self.response_val = errors.DocumentDoesNotExist()
- self.assertIs(None, self.db.get_doc('deleted'))
- self.assertEqual(
- ('GET', ['doc', 'deleted'], {'include_deleted': False}, None,
- None), self.got)
-
- def test_get_doc_deleted_include_deleted(self):
- self.response_val = errors.HTTPError(404,
- json.dumps(
- {"error": errors.DOCUMENT_DELETED}
- ),
- {'x-u1db-rev': 'doc-rev-gone',
- 'x-u1db-has-conflicts': 'false'})
- doc = self.db.get_doc('deleted', include_deleted=True)
- self.assertEqual('deleted', doc.doc_id)
- self.assertEqual('doc-rev-gone', doc.rev)
- self.assertIs(None, doc.content)
- self.assertEqual(
- ('GET', ['doc', 'deleted'], {'include_deleted': True}, None, None),
- self.got)
-
- def test_get_doc_pass_through_errors(self):
- self.response_val = errors.HTTPError(500, 'Crash.')
- self.assertRaises(errors.HTTPError,
- self.db.get_doc, 'something-something')
-
- def test_create_doc_with_id(self):
- self.response_val = {'rev': 'doc-rev'}, {}
- new_doc = self.db.create_doc_from_json('{"v": 1}', doc_id='doc-id')
- self.assertEqual('doc-rev', new_doc.rev)
- self.assertEqual('doc-id', new_doc.doc_id)
- self.assertEqual('{"v": 1}', new_doc.get_json())
- self.assertEqual(('PUT', ['doc', 'doc-id'], {},
- '{"v": 1}', 'application/json'), self.got)
-
- def test_create_doc_without_id(self):
- self.response_val = {'rev': 'doc-rev-2'}, {}
- new_doc = self.db.create_doc_from_json('{"v": 3}')
- self.assertEqual('D-', new_doc.doc_id[:2])
- self.assertEqual('doc-rev-2', new_doc.rev)
- self.assertEqual('{"v": 3}', new_doc.get_json())
- self.assertEqual(('PUT', ['doc', new_doc.doc_id], {},
- '{"v": 3}', 'application/json'), self.got)
-
- def test_delete_doc(self):
- self.response_val = {'rev': 'doc-rev-gone'}, {}
- doc = Document('doc-id', 'doc-rev', None)
- self.db.delete_doc(doc)
- self.assertEqual('doc-rev-gone', doc.rev)
- self.assertEqual(('DELETE', ['doc', 'doc-id'], {'old_rev': 'doc-rev'},
- None, None), self.got)
-
- def test_get_sync_target(self):
- st = self.db.get_sync_target()
- self.assertIsInstance(st, http_target.HTTPSyncTarget)
- self.assertEqual(st._url, self.db._url)
-
- def test_get_sync_target_inherits_oauth_credentials(self):
- self.db.set_oauth_credentials(tests.consumer1.key,
- tests.consumer1.secret,
- tests.token1.key, tests.token1.secret)
- st = self.db.get_sync_target()
- self.assertEqual(self.db._creds, st._creds)
-
-
-class TestHTTPDatabaseCtrWithCreds(tests.TestCase):
-
- def test_ctr_with_creds(self):
- db1 = http_database.HTTPDatabase('http://dbs/db', creds={'oauth': {
- 'consumer_key': tests.consumer1.key,
- 'consumer_secret': tests.consumer1.secret,
- 'token_key': tests.token1.key,
- 'token_secret': tests.token1.secret
- }})
- self.assertIn('oauth', db1._creds)
-
-
-class TestHTTPDatabaseIntegration(tests.TestCaseWithServer):
-
- make_app_with_state = staticmethod(make_http_app)
-
- def setUp(self):
- super(TestHTTPDatabaseIntegration, self).setUp()
- self.startServer()
-
- def test_non_existing_db(self):
- db = http_database.HTTPDatabase(self.getURL('not-there'))
- self.assertRaises(errors.DatabaseDoesNotExist, db.get_doc, 'doc1')
-
- def test__ensure(self):
- db = http_database.HTTPDatabase(self.getURL('new'))
- db._ensure()
- self.assertIs(None, db.get_doc('doc1'))
-
- def test__delete(self):
- self.request_state._create_database('db0')
- db = http_database.HTTPDatabase(self.getURL('db0'))
- db._delete()
- self.assertRaises(errors.DatabaseDoesNotExist,
- self.request_state.check_database, 'db0')
-
- def test_open_database_existing(self):
- self.request_state._create_database('db0')
- db = http_database.HTTPDatabase.open_database(self.getURL('db0'),
- create=False)
- self.assertIs(None, db.get_doc('doc1'))
-
- def test_open_database_non_existing(self):
- self.assertRaises(errors.DatabaseDoesNotExist,
- http_database.HTTPDatabase.open_database,
- self.getURL('not-there'),
- create=False)
-
- def test_open_database_create(self):
- db = http_database.HTTPDatabase.open_database(self.getURL('new'),
- create=True)
- self.assertIs(None, db.get_doc('doc1'))
-
- def test_delete_database_existing(self):
- self.request_state._create_database('db0')
- http_database.HTTPDatabase.delete_database(self.getURL('db0'))
- self.assertRaises(errors.DatabaseDoesNotExist,
- self.request_state.check_database, 'db0')
-
- def test_doc_ids_needing_quoting(self):
- db0 = self.request_state._create_database('db0')
- db = http_database.HTTPDatabase.open_database(self.getURL('db0'),
- create=False)
- doc = Document('%fff', None, '{}')
- db.put_doc(doc)
- self.assertGetDoc(db0, '%fff', doc.rev, '{}', False)
- self.assertGetDoc(db, '%fff', doc.rev, '{}', False)
diff --git a/src/leap/soledad/tests/u1db_tests/test_https.py b/src/leap/soledad/tests/u1db_tests/test_https.py
deleted file mode 100644
index 3f8797d8..00000000
--- a/src/leap/soledad/tests/u1db_tests/test_https.py
+++ /dev/null
@@ -1,117 +0,0 @@
-"""Test support for client-side https support."""
-
-import os
-import ssl
-import sys
-
-from paste import httpserver
-
-from leap.soledad.tests import u1db_tests as tests
-
-from u1db.remote import (
- http_client,
- http_target,
-)
-
-from leap.soledad.tests.u1db_tests.test_remote_sync_target import (
- make_oauth_http_app,
-)
-
-
-def https_server_def():
- def make_server(host_port, application):
- from OpenSSL import SSL
- cert_file = os.path.join(os.path.dirname(__file__), 'testing-certs',
- 'testing.cert')
- key_file = os.path.join(os.path.dirname(__file__), 'testing-certs',
- 'testing.key')
- ssl_context = SSL.Context(SSL.SSLv23_METHOD)
- ssl_context.use_privatekey_file(key_file)
- ssl_context.use_certificate_chain_file(cert_file)
- srv = httpserver.WSGIServerBase(application, host_port,
- httpserver.WSGIHandler,
- ssl_context=ssl_context
- )
-
- def shutdown_request(req):
- req.shutdown()
- srv.close_request(req)
-
- srv.shutdown_request = shutdown_request
- application.base_url = "https://localhost:%s" % srv.server_address[1]
- return srv
- return make_server, "shutdown", "https"
-
-
-def oauth_https_sync_target(test, host, path):
- _, port = test.server.server_address
- st = http_target.HTTPSyncTarget('https://%s:%d/~/%s' % (host, port, path))
- st.set_oauth_credentials(tests.consumer1.key, tests.consumer1.secret,
- tests.token1.key, tests.token1.secret)
- return st
-
-
-class TestHttpSyncTargetHttpsSupport(tests.TestCaseWithServer):
-
- scenarios = [
- ('oauth_https', {'server_def': https_server_def,
- 'make_app_with_state': make_oauth_http_app,
- 'make_document_for_test':
- tests.make_document_for_test,
- 'sync_target': oauth_https_sync_target
- }),
- ]
-
- def setUp(self):
- try:
- import OpenSSL # noqa
- except ImportError:
- self.skipTest("Requires pyOpenSSL")
- self.cacert_pem = os.path.join(os.path.dirname(__file__),
- 'testing-certs', 'cacert.pem')
- super(TestHttpSyncTargetHttpsSupport, self).setUp()
-
- def getSyncTarget(self, host, path=None):
- if self.server is None:
- self.startServer()
- return self.sync_target(self, host, path)
-
- def test_working(self):
- self.startServer()
- db = self.request_state._create_database('test')
- self.patch(http_client, 'CA_CERTS', self.cacert_pem)
- remote_target = self.getSyncTarget('localhost', 'test')
- remote_target.record_sync_info('other-id', 2, 'T-id')
- self.assertEqual(
- (2, 'T-id'), db._get_replica_gen_and_trans_id('other-id'))
-
- def test_cannot_verify_cert(self):
- if not sys.platform.startswith('linux'):
- self.skipTest(
- "XXX certificate verification happens on linux only for now")
- self.startServer()
- # don't print expected traceback server-side
- self.server.handle_error = lambda req, cli_addr: None
- self.request_state._create_database('test')
- remote_target = self.getSyncTarget('localhost', 'test')
- try:
- remote_target.record_sync_info('other-id', 2, 'T-id')
- except ssl.SSLError, e:
- self.assertIn("certificate verify failed", str(e))
- else:
- self.fail("certificate verification should have failed.")
-
- def test_host_mismatch(self):
- if not sys.platform.startswith('linux'):
- self.skipTest(
- "XXX certificate verification happens on linux only for now")
- self.startServer()
- self.request_state._create_database('test')
- self.patch(http_client, 'CA_CERTS', self.cacert_pem)
- remote_target = self.getSyncTarget('127.0.0.1', 'test')
- self.assertRaises(
- http_client.CertificateError, remote_target.record_sync_info,
- 'other-id', 2, 'T-id')
-
-
-load_tests = tests.load_with_scenarios
diff --git a/src/leap/soledad/tests/u1db_tests/test_open.py b/src/leap/soledad/tests/u1db_tests/test_open.py
deleted file mode 100644
index 0ff307e8..00000000
--- a/src/leap/soledad/tests/u1db_tests/test_open.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# Copyright 2011 Canonical Ltd.
-#
-# This file is part of u1db.
-#
-# u1db is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License version 3
-# as published by the Free Software Foundation.
-#
-# u1db is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with u1db. If not, see <http://www.gnu.org/licenses/>.
-
-"""Test u1db.open"""
-
-import os
-
-from u1db import (
- errors,
- open as u1db_open,
-)
-from leap.soledad.tests import u1db_tests as tests
-from u1db.backends import sqlite_backend
-from leap.soledad.tests.u1db_tests.test_backends import TestAlternativeDocument
-
-
-class TestU1DBOpen(tests.TestCase):
-
- def setUp(self):
- super(TestU1DBOpen, self).setUp()
- tmpdir = self.createTempDir()
- self.db_path = tmpdir + '/test.db'
-
- def test_open_no_create(self):
- self.assertRaises(errors.DatabaseDoesNotExist,
- u1db_open, self.db_path, create=False)
- self.assertFalse(os.path.exists(self.db_path))
-
- def test_open_create(self):
- db = u1db_open(self.db_path, create=True)
- self.addCleanup(db.close)
- self.assertTrue(os.path.exists(self.db_path))
- self.assertIsInstance(db, sqlite_backend.SQLiteDatabase)
-
- def test_open_with_factory(self):
- db = u1db_open(self.db_path, create=True,
- document_factory=TestAlternativeDocument)
- self.addCleanup(db.close)
- self.assertEqual(TestAlternativeDocument, db._factory)
-
- def test_open_existing(self):
- db = sqlite_backend.SQLitePartialExpandDatabase(self.db_path)
- self.addCleanup(db.close)
- doc = db.create_doc_from_json(tests.simple_doc)
- # Even though create=True, we shouldn't wipe the db
- db2 = u1db_open(self.db_path, create=True)
- self.addCleanup(db2.close)
- doc2 = db2.get_doc(doc.doc_id)
- self.assertEqual(doc, doc2)
-
- def test_open_existing_no_create(self):
- db = sqlite_backend.SQLitePartialExpandDatabase(self.db_path)
- self.addCleanup(db.close)
- db2 = u1db_open(self.db_path, create=False)
- self.addCleanup(db2.close)
- self.assertIsInstance(db2, sqlite_backend.SQLitePartialExpandDatabase)
diff --git a/src/leap/soledad/tests/u1db_tests/test_remote_sync_target.py b/src/leap/soledad/tests/u1db_tests/test_remote_sync_target.py
deleted file mode 100644
index 66d404d2..00000000
--- a/src/leap/soledad/tests/u1db_tests/test_remote_sync_target.py
+++ /dev/null
@@ -1,317 +0,0 @@
-# Copyright 2011-2012 Canonical Ltd.
-#
-# This file is part of u1db.
-#
-# u1db is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License version 3
-# as published by the Free Software Foundation.
-#
-# u1db is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with u1db. If not, see <http://www.gnu.org/licenses/>.
-
-"""Tests for the remote sync targets"""
-
-import cStringIO
-
-from u1db import (
- errors,
-)
-
-from leap.soledad.tests import u1db_tests as tests
-
-from u1db.remote import (
- http_app,
- http_target,
- oauth_middleware,
-)
-
-
-class TestHTTPSyncTargetBasics(tests.TestCase):
-
- def test_parse_url(self):
- remote_target = http_target.HTTPSyncTarget('http://127.0.0.1:12345/')
- self.assertEqual('http', remote_target._url.scheme)
- self.assertEqual('127.0.0.1', remote_target._url.hostname)
- self.assertEqual(12345, remote_target._url.port)
- self.assertEqual('/', remote_target._url.path)
-
-
-class TestParsingSyncStream(tests.TestCase):
-
- def test_wrong_start(self):
- tgt = http_target.HTTPSyncTarget("http://foo/foo")
-
- self.assertRaises(errors.BrokenSyncStream,
- tgt._parse_sync_stream, "{}\r\n]", None)
-
- self.assertRaises(errors.BrokenSyncStream,
- tgt._parse_sync_stream, "\r\n{}\r\n]", None)
-
- self.assertRaises(errors.BrokenSyncStream,
- tgt._parse_sync_stream, "", None)
-
- def test_wrong_end(self):
- tgt = http_target.HTTPSyncTarget("http://foo/foo")
-
- self.assertRaises(errors.BrokenSyncStream,
- tgt._parse_sync_stream, "[\r\n{}", None)
-
- self.assertRaises(errors.BrokenSyncStream,
- tgt._parse_sync_stream, "[\r\n", None)
-
- def test_missing_comma(self):
- tgt = http_target.HTTPSyncTarget("http://foo/foo")
-
- self.assertRaises(errors.BrokenSyncStream,
- tgt._parse_sync_stream,
- '[\r\n{}\r\n{"id": "i", "rev": "r", '
- '"content": "c", "gen": 3}\r\n]', None)
-
- def test_no_entries(self):
- tgt = http_target.HTTPSyncTarget("http://foo/foo")
-
- self.assertRaises(errors.BrokenSyncStream,
- tgt._parse_sync_stream, "[\r\n]", None)
-
- def test_extra_comma(self):
- tgt = http_target.HTTPSyncTarget("http://foo/foo")
-
- self.assertRaises(errors.BrokenSyncStream,
- tgt._parse_sync_stream, "[\r\n{},\r\n]", None)
-
- self.assertRaises(errors.BrokenSyncStream,
- tgt._parse_sync_stream,
- '[\r\n{},\r\n{"id": "i", "rev": "r", '
- '"content": "{}", "gen": 3, "trans_id": "T-sid"}'
- ',\r\n]',
- lambda doc, gen, trans_id: None)
-
- def test_error_in_stream(self):
- tgt = http_target.HTTPSyncTarget("http://foo/foo")
-
- self.assertRaises(errors.Unavailable,
- tgt._parse_sync_stream,
- '[\r\n{"new_generation": 0},'
- '\r\n{"error": "unavailable"}\r\n', None)
-
- self.assertRaises(errors.Unavailable,
- tgt._parse_sync_stream,
- '[\r\n{"error": "unavailable"}\r\n', None)
-
- self.assertRaises(errors.BrokenSyncStream,
- tgt._parse_sync_stream,
- '[\r\n{"error": "?"}\r\n', None)
-
-
-def make_http_app(state):
- return http_app.HTTPApp(state)
-
-
-def http_sync_target(test, path):
- return http_target.HTTPSyncTarget(test.getURL(path))
-
-
-def make_oauth_http_app(state):
- app = http_app.HTTPApp(state)
- application = oauth_middleware.OAuthMiddleware(app, None, prefix='/~/')
- application.get_oauth_data_store = lambda: tests.testingOAuthStore
- return application
-
-
-def oauth_http_sync_target(test, path):
- st = http_sync_target(test, '~/' + path)
- st.set_oauth_credentials(tests.consumer1.key, tests.consumer1.secret,
- tests.token1.key, tests.token1.secret)
- return st
-
-
-class TestRemoteSyncTargets(tests.TestCaseWithServer):
-
- scenarios = [
- ('http', {'make_app_with_state': make_http_app,
- 'make_document_for_test': tests.make_document_for_test,
- 'sync_target': http_sync_target}),
- ('oauth_http', {'make_app_with_state': make_oauth_http_app,
- 'make_document_for_test': tests.make_document_for_test,
- 'sync_target': oauth_http_sync_target}),
- ]
-
- def getSyncTarget(self, path=None):
- if self.server is None:
- self.startServer()
- return self.sync_target(self, path)
-
- def test_get_sync_info(self):
- self.startServer()
- db = self.request_state._create_database('test')
- db._set_replica_gen_and_trans_id('other-id', 1, 'T-transid')
- remote_target = self.getSyncTarget('test')
- self.assertEqual(('test', 0, '', 1, 'T-transid'),
- remote_target.get_sync_info('other-id'))
-
- def test_record_sync_info(self):
- self.startServer()
- db = self.request_state._create_database('test')
- remote_target = self.getSyncTarget('test')
- remote_target.record_sync_info('other-id', 2, 'T-transid')
- self.assertEqual(
- (2, 'T-transid'), db._get_replica_gen_and_trans_id('other-id'))
-
- def test_sync_exchange_send(self):
- self.startServer()
- db = self.request_state._create_database('test')
- remote_target = self.getSyncTarget('test')
- other_docs = []
-
- def receive_doc(doc):
- other_docs.append((doc.doc_id, doc.rev, doc.get_json()))
-
- doc = self.make_document('doc-here', 'replica:1', '{"value": "here"}')
- new_gen, trans_id = remote_target.sync_exchange(
- [(doc, 10, 'T-sid')], 'replica', last_known_generation=0,
- last_known_trans_id=None, return_doc_cb=receive_doc)
- self.assertEqual(1, new_gen)
- self.assertGetDoc(
- db, 'doc-here', 'replica:1', '{"value": "here"}', False)
-
- def test_sync_exchange_send_failure_and_retry_scenario(self):
- self.startServer()
-
- def blackhole_getstderr(inst):
- return cStringIO.StringIO()
-
- self.patch(self.server.RequestHandlerClass, 'get_stderr',
- blackhole_getstderr)
- db = self.request_state._create_database('test')
- _put_doc_if_newer = db._put_doc_if_newer
- trigger_ids = ['doc-here2']
-
- def bomb_put_doc_if_newer(doc, save_conflict,
- replica_uid=None, replica_gen=None,
- replica_trans_id=None):
- if doc.doc_id in trigger_ids:
- raise Exception
- return _put_doc_if_newer(doc, save_conflict=save_conflict,
- replica_uid=replica_uid,
- replica_gen=replica_gen,
- replica_trans_id=replica_trans_id)
- self.patch(db, '_put_doc_if_newer', bomb_put_doc_if_newer)
- remote_target = self.getSyncTarget('test')
- other_changes = []
-
- def receive_doc(doc, gen, trans_id):
- other_changes.append(
- (doc.doc_id, doc.rev, doc.get_json(), gen, trans_id))
-
- doc1 = self.make_document('doc-here', 'replica:1', '{"value": "here"}')
- doc2 = self.make_document('doc-here2', 'replica:1',
- '{"value": "here2"}')
- self.assertRaises(
- errors.HTTPError,
- remote_target.sync_exchange,
- [(doc1, 10, 'T-sid'), (doc2, 11, 'T-sud')],
- 'replica', last_known_generation=0, last_known_trans_id=None,
- return_doc_cb=receive_doc)
- self.assertGetDoc(db, 'doc-here', 'replica:1', '{"value": "here"}',
- False)
- self.assertEqual(
- (10, 'T-sid'), db._get_replica_gen_and_trans_id('replica'))
- self.assertEqual([], other_changes)
- # retry
- trigger_ids = []
- new_gen, trans_id = remote_target.sync_exchange(
- [(doc2, 11, 'T-sud')], 'replica', last_known_generation=0,
- last_known_trans_id=None, return_doc_cb=receive_doc)
- self.assertGetDoc(db, 'doc-here2', 'replica:1', '{"value": "here2"}',
- False)
- self.assertEqual(
- (11, 'T-sud'), db._get_replica_gen_and_trans_id('replica'))
- self.assertEqual(2, new_gen)
- # bounced back to us
- self.assertEqual(
- ('doc-here', 'replica:1', '{"value": "here"}', 1),
- other_changes[0][:-1])
-
- def test_sync_exchange_in_stream_error(self):
- self.startServer()
-
- def blackhole_getstderr(inst):
- return cStringIO.StringIO()
-
- self.patch(self.server.RequestHandlerClass, 'get_stderr',
- blackhole_getstderr)
- db = self.request_state._create_database('test')
- doc = db.create_doc_from_json('{"value": "there"}')
-
- def bomb_get_docs(doc_ids, check_for_conflicts=None,
- include_deleted=False):
- yield doc
- # delayed failure case
- raise errors.Unavailable
-
- self.patch(db, 'get_docs', bomb_get_docs)
- remote_target = self.getSyncTarget('test')
- other_changes = []
-
- def receive_doc(doc, gen, trans_id):
- other_changes.append(
- (doc.doc_id, doc.rev, doc.get_json(), gen, trans_id))
-
- self.assertRaises(
- errors.Unavailable, remote_target.sync_exchange, [], 'replica',
- last_known_generation=0, last_known_trans_id=None,
- return_doc_cb=receive_doc)
- self.assertEqual(
- (doc.doc_id, doc.rev, '{"value": "there"}', 1),
- other_changes[0][:-1])
-
- def test_sync_exchange_receive(self):
- self.startServer()
- db = self.request_state._create_database('test')
- doc = db.create_doc_from_json('{"value": "there"}')
- remote_target = self.getSyncTarget('test')
- other_changes = []
-
- def receive_doc(doc, gen, trans_id):
- other_changes.append(
- (doc.doc_id, doc.rev, doc.get_json(), gen, trans_id))
-
- new_gen, trans_id = remote_target.sync_exchange(
- [], 'replica', last_known_generation=0, last_known_trans_id=None,
- return_doc_cb=receive_doc)
- self.assertEqual(1, new_gen)
- self.assertEqual(
- (doc.doc_id, doc.rev, '{"value": "there"}', 1),
- other_changes[0][:-1])
-
- def test_sync_exchange_send_ensure_callback(self):
- self.startServer()
- remote_target = self.getSyncTarget('test')
- other_docs = []
- replica_uid_box = []
-
- def receive_doc(doc):
- other_docs.append((doc.doc_id, doc.rev, doc.get_json()))
-
- def ensure_cb(replica_uid):
- replica_uid_box.append(replica_uid)
-
- doc = self.make_document('doc-here', 'replica:1', '{"value": "here"}')
- new_gen, trans_id = remote_target.sync_exchange(
- [(doc, 10, 'T-sid')], 'replica', last_known_generation=0,
- last_known_trans_id=None, return_doc_cb=receive_doc,
- ensure_callback=ensure_cb)
- self.assertEqual(1, new_gen)
- db = self.request_state.open_database('test')
- self.assertEqual(1, len(replica_uid_box))
- self.assertEqual(db._replica_uid, replica_uid_box[0])
- self.assertGetDoc(
- db, 'doc-here', 'replica:1', '{"value": "here"}', False)
-
-
-load_tests = tests.load_with_scenarios
diff --git a/src/leap/soledad/tests/u1db_tests/test_sqlite_backend.py b/src/leap/soledad/tests/u1db_tests/test_sqlite_backend.py
deleted file mode 100644
index 1380e4b1..00000000
--- a/src/leap/soledad/tests/u1db_tests/test_sqlite_backend.py
+++ /dev/null
@@ -1,494 +0,0 @@
-# Copyright 2011 Canonical Ltd.
-#
-# This file is part of u1db.
-#
-# u1db is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License version 3
-# as published by the Free Software Foundation.
-#
-# u1db is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with u1db. If not, see <http://www.gnu.org/licenses/>.
-
-"""Test sqlite backend internals."""
-
-import os
-import time
-import threading
-
-from pysqlcipher import dbapi2
-
-from u1db import (
- errors,
- query_parser,
-)
-
-from leap.soledad.tests import u1db_tests as tests
-
-from u1db.backends import sqlite_backend
-from leap.soledad.tests.u1db_tests.test_backends import TestAlternativeDocument
-
-
-simple_doc = '{"key": "value"}'
-nested_doc = '{"key": "value", "sub": {"doc": "underneath"}}'
-
-
-class TestSQLiteDatabase(tests.TestCase):
-
- def test_atomic_initialize(self):
- tmpdir = self.createTempDir()
- dbname = os.path.join(tmpdir, 'atomic.db')
-
- t2 = None # will be a thread
-
- class SQLiteDatabaseTesting(sqlite_backend.SQLiteDatabase):
- _index_storage_value = "testing"
-
- def __init__(self, dbname, ntry):
- self._try = ntry
- self._is_initialized_invocations = 0
- super(SQLiteDatabaseTesting, self).__init__(dbname)
-
- def _is_initialized(self, c):
- res = super(SQLiteDatabaseTesting, self)._is_initialized(c)
- if self._try == 1:
- self._is_initialized_invocations += 1
- if self._is_initialized_invocations == 2:
- t2.start()
- # hard to do better and have a generic test
- time.sleep(0.05)
- return res
-
- outcome2 = []
-
- def second_try():
- try:
- db2 = SQLiteDatabaseTesting(dbname, 2)
- except Exception, e:
- outcome2.append(e)
- else:
- outcome2.append(db2)
-
- t2 = threading.Thread(target=second_try)
- db1 = SQLiteDatabaseTesting(dbname, 1)
- t2.join()
-
- self.assertIsInstance(outcome2[0], SQLiteDatabaseTesting)
- db2 = outcome2[0]
- self.assertTrue(db2._is_initialized(db1._get_sqlite_handle().cursor()))
-
-
-class TestSQLitePartialExpandDatabase(tests.TestCase):
-
- def setUp(self):
- super(TestSQLitePartialExpandDatabase, self).setUp()
- self.db = sqlite_backend.SQLitePartialExpandDatabase(':memory:')
- self.db._set_replica_uid('test')
-
- def test_create_database(self):
- raw_db = self.db._get_sqlite_handle()
- self.assertNotEqual(None, raw_db)
-
- def test_default_replica_uid(self):
- self.db = sqlite_backend.SQLitePartialExpandDatabase(':memory:')
- self.assertIsNot(None, self.db._replica_uid)
- self.assertEqual(32, len(self.db._replica_uid))
- int(self.db._replica_uid, 16)
-
- def test__close_sqlite_handle(self):
- raw_db = self.db._get_sqlite_handle()
- self.db._close_sqlite_handle()
- self.assertRaises(dbapi2.ProgrammingError,
- raw_db.cursor)
-
- def test_create_database_initializes_schema(self):
- raw_db = self.db._get_sqlite_handle()
- c = raw_db.cursor()
- c.execute("SELECT * FROM u1db_config")
- config = dict([(r[0], r[1]) for r in c.fetchall()])
- self.assertEqual({'sql_schema': '0', 'replica_uid': 'test',
- 'index_storage': 'expand referenced'}, config)
-
- # These tables must exist, though we don't care what is in them yet
- c.execute("SELECT * FROM transaction_log")
- c.execute("SELECT * FROM document")
- c.execute("SELECT * FROM document_fields")
- c.execute("SELECT * FROM sync_log")
- c.execute("SELECT * FROM conflicts")
- c.execute("SELECT * FROM index_definitions")
-
- def test__parse_index(self):
- self.db = sqlite_backend.SQLitePartialExpandDatabase(':memory:')
- g = self.db._parse_index_definition('fieldname')
- self.assertIsInstance(g, query_parser.ExtractField)
- self.assertEqual(['fieldname'], g.field)
-
- def test__update_indexes(self):
- self.db = sqlite_backend.SQLitePartialExpandDatabase(':memory:')
- g = self.db._parse_index_definition('fieldname')
- c = self.db._get_sqlite_handle().cursor()
- self.db._update_indexes('doc-id', {'fieldname': 'val'},
- [('fieldname', g)], c)
- c.execute('SELECT doc_id, field_name, value FROM document_fields')
- self.assertEqual([('doc-id', 'fieldname', 'val')],
- c.fetchall())
-
- def test__set_replica_uid(self):
- # Start from scratch, so that replica_uid isn't set.
- self.db = sqlite_backend.SQLitePartialExpandDatabase(':memory:')
- self.assertIsNot(None, self.db._real_replica_uid)
- self.assertIsNot(None, self.db._replica_uid)
- self.db._set_replica_uid('foo')
- c = self.db._get_sqlite_handle().cursor()
- c.execute("SELECT value FROM u1db_config WHERE name='replica_uid'")
- self.assertEqual(('foo',), c.fetchone())
- self.assertEqual('foo', self.db._real_replica_uid)
- self.assertEqual('foo', self.db._replica_uid)
- self.db._close_sqlite_handle()
- self.assertEqual('foo', self.db._replica_uid)
-
- def test__get_generation(self):
- self.assertEqual(0, self.db._get_generation())
-
- def test__get_generation_info(self):
- self.assertEqual((0, ''), self.db._get_generation_info())
-
- def test_create_index(self):
- self.db.create_index('test-idx', "key")
- self.assertEqual([('test-idx', ["key"])], self.db.list_indexes())
-
- def test_create_index_multiple_fields(self):
- self.db.create_index('test-idx', "key", "key2")
- self.assertEqual([('test-idx', ["key", "key2"])],
- self.db.list_indexes())
-
- def test__get_index_definition(self):
- self.db.create_index('test-idx', "key", "key2")
- # TODO: How would you test that an index is getting used for an SQL
- # request?
- self.assertEqual(["key", "key2"],
- self.db._get_index_definition('test-idx'))
-
- def test_list_index_mixed(self):
- # Make sure that we properly order the output
- c = self.db._get_sqlite_handle().cursor()
- # We intentionally insert the data in weird ordering, to make sure the
- # query still gets it back correctly.
- c.executemany("INSERT INTO index_definitions VALUES (?, ?, ?)",
- [('idx-1', 0, 'key10'),
- ('idx-2', 2, 'key22'),
- ('idx-1', 1, 'key11'),
- ('idx-2', 0, 'key20'),
- ('idx-2', 1, 'key21')])
- self.assertEqual([('idx-1', ['key10', 'key11']),
- ('idx-2', ['key20', 'key21', 'key22'])],
- self.db.list_indexes())
-
- def test_no_indexes_no_document_fields(self):
- self.db.create_doc_from_json(
- '{"key1": "val1", "key2": "val2"}')
- c = self.db._get_sqlite_handle().cursor()
- c.execute("SELECT doc_id, field_name, value FROM document_fields"
- " ORDER BY doc_id, field_name, value")
- self.assertEqual([], c.fetchall())
-
- def test_create_extracts_fields(self):
- doc1 = self.db.create_doc_from_json('{"key1": "val1", "key2": "val2"}')
- doc2 = self.db.create_doc_from_json('{"key1": "valx", "key2": "valy"}')
- c = self.db._get_sqlite_handle().cursor()
- c.execute("SELECT doc_id, field_name, value FROM document_fields"
- " ORDER BY doc_id, field_name, value")
- self.assertEqual([], c.fetchall())
- self.db.create_index('test', 'key1', 'key2')
- c.execute("SELECT doc_id, field_name, value FROM document_fields"
- " ORDER BY doc_id, field_name, value")
- self.assertEqual(sorted(
- [(doc1.doc_id, "key1", "val1"),
- (doc1.doc_id, "key2", "val2"),
- (doc2.doc_id, "key1", "valx"),
- (doc2.doc_id, "key2", "valy"), ]), sorted(c.fetchall()))
-
- def test_put_updates_fields(self):
- self.db.create_index('test', 'key1', 'key2')
- doc1 = self.db.create_doc_from_json(
- '{"key1": "val1", "key2": "val2"}')
- doc1.content = {"key1": "val1", "key2": "valy"}
- self.db.put_doc(doc1)
- c = self.db._get_sqlite_handle().cursor()
- c.execute("SELECT doc_id, field_name, value FROM document_fields"
- " ORDER BY doc_id, field_name, value")
- self.assertEqual([(doc1.doc_id, "key1", "val1"),
- (doc1.doc_id, "key2", "valy"), ], c.fetchall())
-
- def test_put_updates_nested_fields(self):
- self.db.create_index('test', 'key', 'sub.doc')
- doc1 = self.db.create_doc_from_json(nested_doc)
- c = self.db._get_sqlite_handle().cursor()
- c.execute("SELECT doc_id, field_name, value FROM document_fields"
- " ORDER BY doc_id, field_name, value")
- self.assertEqual([(doc1.doc_id, "key", "value"),
- (doc1.doc_id, "sub.doc", "underneath"), ],
- c.fetchall())
-
- def test__ensure_schema_rollback(self):
- temp_dir = self.createTempDir(prefix='u1db-test-')
- path = temp_dir + '/rollback.db'
-
- class SQLitePartialExpandDbTesting(
- sqlite_backend.SQLitePartialExpandDatabase):
-
- def _set_replica_uid_in_transaction(self, uid):
- super(SQLitePartialExpandDbTesting,
- self)._set_replica_uid_in_transaction(uid)
- if fail:
- raise Exception()
-
- db = SQLitePartialExpandDbTesting.__new__(SQLitePartialExpandDbTesting)
- db._db_handle = dbapi2.connect(path) # db is there but not yet init-ed
- fail = True
- self.assertRaises(Exception, db._ensure_schema)
- fail = False
- db._initialize(db._db_handle.cursor())
-
- def test__open_database(self):
- temp_dir = self.createTempDir(prefix='u1db-test-')
- path = temp_dir + '/test.sqlite'
- sqlite_backend.SQLitePartialExpandDatabase(path)
- db2 = sqlite_backend.SQLiteDatabase._open_database(path)
- self.assertIsInstance(db2, sqlite_backend.SQLitePartialExpandDatabase)
-
- def test__open_database_with_factory(self):
- temp_dir = self.createTempDir(prefix='u1db-test-')
- path = temp_dir + '/test.sqlite'
- sqlite_backend.SQLitePartialExpandDatabase(path)
- db2 = sqlite_backend.SQLiteDatabase._open_database(
- path, document_factory=TestAlternativeDocument)
- self.assertEqual(TestAlternativeDocument, db2._factory)
-
- def test__open_database_non_existent(self):
- temp_dir = self.createTempDir(prefix='u1db-test-')
- path = temp_dir + '/non-existent.sqlite'
- self.assertRaises(errors.DatabaseDoesNotExist,
- sqlite_backend.SQLiteDatabase._open_database, path)
-
- def test__open_database_during_init(self):
- temp_dir = self.createTempDir(prefix='u1db-test-')
- path = temp_dir + '/initialised.db'
- db = sqlite_backend.SQLitePartialExpandDatabase.__new__(
- sqlite_backend.SQLitePartialExpandDatabase)
- db._db_handle = dbapi2.connect(path) # db is there but not yet init-ed
- self.addCleanup(db.close)
- observed = []
-
- class SQLiteDatabaseTesting(sqlite_backend.SQLiteDatabase):
- WAIT_FOR_PARALLEL_INIT_HALF_INTERVAL = 0.1
-
- @classmethod
- def _which_index_storage(cls, c):
- res = super(SQLiteDatabaseTesting, cls)._which_index_storage(c)
- db._ensure_schema() # init db
- observed.append(res[0])
- return res
-
- db2 = SQLiteDatabaseTesting._open_database(path)
- self.addCleanup(db2.close)
- self.assertIsInstance(db2, sqlite_backend.SQLitePartialExpandDatabase)
- self.assertEqual(
- [None,
- sqlite_backend.SQLitePartialExpandDatabase._index_storage_value],
- observed)
-
- def test__open_database_invalid(self):
- class SQLiteDatabaseTesting(sqlite_backend.SQLiteDatabase):
- WAIT_FOR_PARALLEL_INIT_HALF_INTERVAL = 0.1
- temp_dir = self.createTempDir(prefix='u1db-test-')
- path1 = temp_dir + '/invalid1.db'
- with open(path1, 'wb') as f:
- f.write("")
- self.assertRaises(dbapi2.OperationalError,
- SQLiteDatabaseTesting._open_database, path1)
- with open(path1, 'wb') as f:
- f.write("invalid")
- self.assertRaises(dbapi2.DatabaseError,
- SQLiteDatabaseTesting._open_database, path1)
-
- def test_open_database_existing(self):
- temp_dir = self.createTempDir(prefix='u1db-test-')
- path = temp_dir + '/existing.sqlite'
- sqlite_backend.SQLitePartialExpandDatabase(path)
- db2 = sqlite_backend.SQLiteDatabase.open_database(path, create=False)
- self.assertIsInstance(db2, sqlite_backend.SQLitePartialExpandDatabase)
-
- def test_open_database_with_factory(self):
- temp_dir = self.createTempDir(prefix='u1db-test-')
- path = temp_dir + '/existing.sqlite'
- sqlite_backend.SQLitePartialExpandDatabase(path)
- db2 = sqlite_backend.SQLiteDatabase.open_database(
- path, create=False, document_factory=TestAlternativeDocument)
- self.assertEqual(TestAlternativeDocument, db2._factory)
-
- def test_open_database_create(self):
- temp_dir = self.createTempDir(prefix='u1db-test-')
- path = temp_dir + '/new.sqlite'
- sqlite_backend.SQLiteDatabase.open_database(path, create=True)
- db2 = sqlite_backend.SQLiteDatabase.open_database(path, create=False)
- self.assertIsInstance(db2, sqlite_backend.SQLitePartialExpandDatabase)
-
- def test_open_database_non_existent(self):
- temp_dir = self.createTempDir(prefix='u1db-test-')
- path = temp_dir + '/non-existent.sqlite'
- self.assertRaises(errors.DatabaseDoesNotExist,
- sqlite_backend.SQLiteDatabase.open_database, path,
- create=False)
-
- def test_delete_database_existent(self):
- temp_dir = self.createTempDir(prefix='u1db-test-')
- path = temp_dir + '/new.sqlite'
- db = sqlite_backend.SQLiteDatabase.open_database(path, create=True)
- db.close()
- sqlite_backend.SQLiteDatabase.delete_database(path)
- self.assertRaises(errors.DatabaseDoesNotExist,
- sqlite_backend.SQLiteDatabase.open_database, path,
- create=False)
-
- def test_delete_database_nonexistent(self):
- temp_dir = self.createTempDir(prefix='u1db-test-')
- path = temp_dir + '/non-existent.sqlite'
- self.assertRaises(errors.DatabaseDoesNotExist,
- sqlite_backend.SQLiteDatabase.delete_database, path)
-
- def test__get_indexed_fields(self):
- self.db.create_index('idx1', 'a', 'b')
- self.assertEqual(set(['a', 'b']), self.db._get_indexed_fields())
- self.db.create_index('idx2', 'b', 'c')
- self.assertEqual(set(['a', 'b', 'c']), self.db._get_indexed_fields())
-
- def test_indexed_fields_expanded(self):
- self.db.create_index('idx1', 'key1')
- doc1 = self.db.create_doc_from_json('{"key1": "val1", "key2": "val2"}')
- self.assertEqual(set(['key1']), self.db._get_indexed_fields())
- c = self.db._get_sqlite_handle().cursor()
- c.execute("SELECT doc_id, field_name, value FROM document_fields"
- " ORDER BY doc_id, field_name, value")
- self.assertEqual([(doc1.doc_id, 'key1', 'val1')], c.fetchall())
-
- def test_create_index_updates_fields(self):
- doc1 = self.db.create_doc_from_json('{"key1": "val1", "key2": "val2"}')
- self.db.create_index('idx1', 'key1')
- self.assertEqual(set(['key1']), self.db._get_indexed_fields())
- c = self.db._get_sqlite_handle().cursor()
- c.execute("SELECT doc_id, field_name, value FROM document_fields"
- " ORDER BY doc_id, field_name, value")
- self.assertEqual([(doc1.doc_id, 'key1', 'val1')], c.fetchall())
-
- def assertFormatQueryEquals(self, exp_statement, exp_args, definition,
- values):
- statement, args = self.db._format_query(definition, values)
- self.assertEqual(exp_statement, statement)
- self.assertEqual(exp_args, args)
-
- def test__format_query(self):
- self.assertFormatQueryEquals(
- "SELECT d.doc_id, d.doc_rev, d.content, count(c.doc_rev) FROM "
- "document d, document_fields d0 LEFT OUTER JOIN conflicts c ON "
- "c.doc_id = d.doc_id WHERE d.doc_id = d0.doc_id AND d0.field_name "
- "= ? AND d0.value = ? GROUP BY d.doc_id, d.doc_rev, d.content "
- "ORDER BY d0.value;", ["key1", "a"],
- ["key1"], ["a"])
-
- def test__format_query2(self):
- self.assertFormatQueryEquals(
- 'SELECT d.doc_id, d.doc_rev, d.content, count(c.doc_rev) FROM '
- 'document d, document_fields d0, document_fields d1, '
- 'document_fields d2 LEFT OUTER JOIN conflicts c ON c.doc_id = '
- 'd.doc_id WHERE d.doc_id = d0.doc_id AND d0.field_name = ? AND '
- 'd0.value = ? AND d.doc_id = d1.doc_id AND d1.field_name = ? AND '
- 'd1.value = ? AND d.doc_id = d2.doc_id AND d2.field_name = ? AND '
- 'd2.value = ? GROUP BY d.doc_id, d.doc_rev, d.content ORDER BY '
- 'd0.value, d1.value, d2.value;',
- ["key1", "a", "key2", "b", "key3", "c"],
- ["key1", "key2", "key3"], ["a", "b", "c"])
-
- def test__format_query_wildcard(self):
- self.assertFormatQueryEquals(
- 'SELECT d.doc_id, d.doc_rev, d.content, count(c.doc_rev) FROM '
- 'document d, document_fields d0, document_fields d1, '
- 'document_fields d2 LEFT OUTER JOIN conflicts c ON c.doc_id = '
- 'd.doc_id WHERE d.doc_id = d0.doc_id AND d0.field_name = ? AND '
- 'd0.value = ? AND d.doc_id = d1.doc_id AND d1.field_name = ? AND '
- 'd1.value GLOB ? AND d.doc_id = d2.doc_id AND d2.field_name = ? '
- 'AND d2.value NOT NULL GROUP BY d.doc_id, d.doc_rev, d.content '
- 'ORDER BY d0.value, d1.value, d2.value;',
- ["key1", "a", "key2", "b*", "key3"], ["key1", "key2", "key3"],
- ["a", "b*", "*"])
-
- def assertFormatRangeQueryEquals(self, exp_statement, exp_args, definition,
- start_value, end_value):
- statement, args = self.db._format_range_query(
- definition, start_value, end_value)
- self.assertEqual(exp_statement, statement)
- self.assertEqual(exp_args, args)
-
- def test__format_range_query(self):
- self.assertFormatRangeQueryEquals(
- 'SELECT d.doc_id, d.doc_rev, d.content, count(c.doc_rev) FROM '
- 'document d, document_fields d0, document_fields d1, '
- 'document_fields d2 LEFT OUTER JOIN conflicts c ON c.doc_id = '
- 'd.doc_id WHERE d.doc_id = d0.doc_id AND d0.field_name = ? AND '
- 'd0.value >= ? AND d.doc_id = d1.doc_id AND d1.field_name = ? AND '
- 'd1.value >= ? AND d.doc_id = d2.doc_id AND d2.field_name = ? AND '
- 'd2.value >= ? AND d.doc_id = d0.doc_id AND d0.field_name = ? AND '
- 'd0.value <= ? AND d.doc_id = d1.doc_id AND d1.field_name = ? AND '
- 'd1.value <= ? AND d.doc_id = d2.doc_id AND d2.field_name = ? AND '
- 'd2.value <= ? GROUP BY d.doc_id, d.doc_rev, d.content ORDER BY '
- 'd0.value, d1.value, d2.value;',
- ['key1', 'a', 'key2', 'b', 'key3', 'c', 'key1', 'p', 'key2', 'q',
- 'key3', 'r'],
- ["key1", "key2", "key3"], ["a", "b", "c"], ["p", "q", "r"])
-
- def test__format_range_query_no_start(self):
- self.assertFormatRangeQueryEquals(
- 'SELECT d.doc_id, d.doc_rev, d.content, count(c.doc_rev) FROM '
- 'document d, document_fields d0, document_fields d1, '
- 'document_fields d2 LEFT OUTER JOIN conflicts c ON c.doc_id = '
- 'd.doc_id WHERE d.doc_id = d0.doc_id AND d0.field_name = ? AND '
- 'd0.value <= ? AND d.doc_id = d1.doc_id AND d1.field_name = ? AND '
- 'd1.value <= ? AND d.doc_id = d2.doc_id AND d2.field_name = ? AND '
- 'd2.value <= ? GROUP BY d.doc_id, d.doc_rev, d.content ORDER BY '
- 'd0.value, d1.value, d2.value;',
- ['key1', 'a', 'key2', 'b', 'key3', 'c'],
- ["key1", "key2", "key3"], None, ["a", "b", "c"])
-
- def test__format_range_query_no_end(self):
- self.assertFormatRangeQueryEquals(
- 'SELECT d.doc_id, d.doc_rev, d.content, count(c.doc_rev) FROM '
- 'document d, document_fields d0, document_fields d1, '
- 'document_fields d2 LEFT OUTER JOIN conflicts c ON c.doc_id = '
- 'd.doc_id WHERE d.doc_id = d0.doc_id AND d0.field_name = ? AND '
- 'd0.value >= ? AND d.doc_id = d1.doc_id AND d1.field_name = ? AND '
- 'd1.value >= ? AND d.doc_id = d2.doc_id AND d2.field_name = ? AND '
- 'd2.value >= ? GROUP BY d.doc_id, d.doc_rev, d.content ORDER BY '
- 'd0.value, d1.value, d2.value;',
- ['key1', 'a', 'key2', 'b', 'key3', 'c'],
- ["key1", "key2", "key3"], ["a", "b", "c"], None)
-
- def test__format_range_query_wildcard(self):
- self.assertFormatRangeQueryEquals(
- 'SELECT d.doc_id, d.doc_rev, d.content, count(c.doc_rev) FROM '
- 'document d, document_fields d0, document_fields d1, '
- 'document_fields d2 LEFT OUTER JOIN conflicts c ON c.doc_id = '
- 'd.doc_id WHERE d.doc_id = d0.doc_id AND d0.field_name = ? AND '
- 'd0.value >= ? AND d.doc_id = d1.doc_id AND d1.field_name = ? AND '
- 'd1.value >= ? AND d.doc_id = d2.doc_id AND d2.field_name = ? AND '
- 'd2.value NOT NULL AND d.doc_id = d0.doc_id AND d0.field_name = ? '
- 'AND d0.value <= ? AND d.doc_id = d1.doc_id AND d1.field_name = ? '
- 'AND (d1.value < ? OR d1.value GLOB ?) AND d.doc_id = d2.doc_id '
- 'AND d2.field_name = ? AND d2.value NOT NULL GROUP BY d.doc_id, '
- 'd.doc_rev, d.content ORDER BY d0.value, d1.value, d2.value;',
- ['key1', 'a', 'key2', 'b', 'key3', 'key1', 'p', 'key2', 'q', 'q*',
- 'key3'],
- ["key1", "key2", "key3"], ["a", "b*", "*"], ["p", "q*", "*"])
diff --git a/src/leap/soledad/tests/u1db_tests/test_sync.py b/src/leap/soledad/tests/u1db_tests/test_sync.py
deleted file mode 100644
index 96aa2736..00000000
--- a/src/leap/soledad/tests/u1db_tests/test_sync.py
+++ /dev/null
@@ -1,1242 +0,0 @@
-# Copyright 2011-2012 Canonical Ltd.
-#
-# This file is part of u1db.
-#
-# u1db is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License version 3
-# as published by the Free Software Foundation.
-#
-# u1db is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with u1db. If not, see <http://www.gnu.org/licenses/>.
-
-"""The Synchronization class for U1DB."""
-
-import os
-from wsgiref import simple_server
-
-from u1db import (
- errors,
- sync,
- vectorclock,
- SyncTarget,
-)
-
-from leap.soledad.tests import u1db_tests as tests
-
-from u1db.backends import (
- inmemory,
-)
-from u1db.remote import (
- http_target,
-)
-
-from leap.soledad.tests.u1db_tests.test_remote_sync_target import (
- make_http_app,
- make_oauth_http_app,
-)
-
-simple_doc = tests.simple_doc
-nested_doc = tests.nested_doc
-
-
-def _make_local_db_and_target(test):
- db = test.create_database('test')
- st = db.get_sync_target()
- return db, st
-
-
-def _make_local_db_and_http_target(test, path='test'):
- test.startServer()
- db = test.request_state._create_database(os.path.basename(path))
- st = http_target.HTTPSyncTarget.connect(test.getURL(path))
- return db, st
-
-
-def _make_local_db_and_oauth_http_target(test):
- db, st = _make_local_db_and_http_target(test, '~/test')
- st.set_oauth_credentials(tests.consumer1.key, tests.consumer1.secret,
- tests.token1.key, tests.token1.secret)
- return db, st
-
-
-target_scenarios = [
- ('local', {'create_db_and_target': _make_local_db_and_target}),
- ('http', {'create_db_and_target': _make_local_db_and_http_target,
- 'make_app_with_state': make_http_app}),
- ('oauth_http', {'create_db_and_target':
- _make_local_db_and_oauth_http_target,
- 'make_app_with_state': make_oauth_http_app}),
-]
-
-
-class DatabaseSyncTargetTests(tests.DatabaseBaseTests,
- tests.TestCaseWithServer):
-
- scenarios = (tests.multiply_scenarios(tests.DatabaseBaseTests.scenarios,
- target_scenarios))
- #+ c_db_scenarios)
- # whitebox true means self.db is the actual local db object
- # against which the sync is performed
- whitebox = True
-
- def setUp(self):
- super(DatabaseSyncTargetTests, self).setUp()
- self.db, self.st = self.create_db_and_target(self)
- self.other_changes = []
-
- def tearDown(self):
- # We delete them explicitly, so that connections are cleanly closed
- del self.st
- self.db.close()
- del self.db
- super(DatabaseSyncTargetTests, self).tearDown()
-
- def receive_doc(self, doc, gen, trans_id):
- self.other_changes.append(
- (doc.doc_id, doc.rev, doc.get_json(), gen, trans_id))
-
- def set_trace_hook(self, callback, shallow=False):
- setter = (self.st._set_trace_hook if not shallow else
- self.st._set_trace_hook_shallow)
- try:
- setter(callback)
- except NotImplementedError:
- self.skipTest("%s does not implement _set_trace_hook"
- % (self.st.__class__.__name__,))
-
- def test_get_sync_target(self):
- self.assertIsNot(None, self.st)
-
- def test_get_sync_info(self):
- self.assertEqual(
- ('test', 0, '', 0, ''), self.st.get_sync_info('other'))
-
- def test_create_doc_updates_sync_info(self):
- self.assertEqual(
- ('test', 0, '', 0, ''), self.st.get_sync_info('other'))
- self.db.create_doc_from_json(simple_doc)
- self.assertEqual(1, self.st.get_sync_info('other')[1])
-
- def test_record_sync_info(self):
- self.st.record_sync_info('replica', 10, 'T-transid')
- self.assertEqual(
- ('test', 0, '', 10, 'T-transid'), self.st.get_sync_info('replica'))
-
- def test_sync_exchange(self):
- docs_by_gen = [
- (self.make_document('doc-id', 'replica:1', simple_doc), 10,
- 'T-sid')]
- new_gen, trans_id = self.st.sync_exchange(
- docs_by_gen, 'replica', last_known_generation=0,
- last_known_trans_id=None, return_doc_cb=self.receive_doc)
- self.assertGetDoc(self.db, 'doc-id', 'replica:1', simple_doc, False)
- self.assertTransactionLog(['doc-id'], self.db)
- last_trans_id = self.getLastTransId(self.db)
- self.assertEqual(([], 1, last_trans_id),
- (self.other_changes, new_gen, last_trans_id))
- self.assertEqual(10, self.st.get_sync_info('replica')[3])
-
- def test_sync_exchange_deleted(self):
- doc = self.db.create_doc_from_json('{}')
- edit_rev = 'replica:1|' + doc.rev
- docs_by_gen = [
- (self.make_document(doc.doc_id, edit_rev, None), 10, 'T-sid')]
- new_gen, trans_id = self.st.sync_exchange(
- docs_by_gen, 'replica', last_known_generation=0,
- last_known_trans_id=None, return_doc_cb=self.receive_doc)
- self.assertGetDocIncludeDeleted(
- self.db, doc.doc_id, edit_rev, None, False)
- self.assertTransactionLog([doc.doc_id, doc.doc_id], self.db)
- last_trans_id = self.getLastTransId(self.db)
- self.assertEqual(([], 2, last_trans_id),
- (self.other_changes, new_gen, trans_id))
- self.assertEqual(10, self.st.get_sync_info('replica')[3])
-
- def test_sync_exchange_push_many(self):
- docs_by_gen = [
- (self.make_document('doc-id', 'replica:1', simple_doc), 10, 'T-1'),
- (self.make_document('doc-id2', 'replica:1', nested_doc), 11,
- 'T-2')]
- new_gen, trans_id = self.st.sync_exchange(
- docs_by_gen, 'replica', last_known_generation=0,
- last_known_trans_id=None, return_doc_cb=self.receive_doc)
- self.assertGetDoc(self.db, 'doc-id', 'replica:1', simple_doc, False)
- self.assertGetDoc(self.db, 'doc-id2', 'replica:1', nested_doc, False)
- self.assertTransactionLog(['doc-id', 'doc-id2'], self.db)
- last_trans_id = self.getLastTransId(self.db)
- self.assertEqual(([], 2, last_trans_id),
- (self.other_changes, new_gen, trans_id))
- self.assertEqual(11, self.st.get_sync_info('replica')[3])
-
- def test_sync_exchange_refuses_conflicts(self):
- doc = self.db.create_doc_from_json(simple_doc)
- self.assertTransactionLog([doc.doc_id], self.db)
- new_doc = '{"key": "altval"}'
- docs_by_gen = [
- (self.make_document(doc.doc_id, 'replica:1', new_doc), 10,
- 'T-sid')]
- new_gen, _ = self.st.sync_exchange(
- docs_by_gen, 'replica', last_known_generation=0,
- last_known_trans_id=None, return_doc_cb=self.receive_doc)
- self.assertTransactionLog([doc.doc_id], self.db)
- self.assertEqual(
- (doc.doc_id, doc.rev, simple_doc, 1), self.other_changes[0][:-1])
- self.assertEqual(1, new_gen)
- if self.whitebox:
- self.assertEqual(self.db._last_exchange_log['return'],
- {'last_gen': 1, 'docs': [(doc.doc_id, doc.rev)]})
-
- def test_sync_exchange_ignores_convergence(self):
- doc = self.db.create_doc_from_json(simple_doc)
- self.assertTransactionLog([doc.doc_id], self.db)
- gen, txid = self.db._get_generation_info()
- docs_by_gen = [
- (self.make_document(doc.doc_id, doc.rev, simple_doc), 10, 'T-sid')]
- new_gen, _ = self.st.sync_exchange(
- docs_by_gen, 'replica', last_known_generation=gen,
- last_known_trans_id=txid, return_doc_cb=self.receive_doc)
- self.assertTransactionLog([doc.doc_id], self.db)
- self.assertEqual(([], 1), (self.other_changes, new_gen))
-
- def test_sync_exchange_returns_new_docs(self):
- doc = self.db.create_doc_from_json(simple_doc)
- self.assertTransactionLog([doc.doc_id], self.db)
- new_gen, _ = self.st.sync_exchange(
- [], 'other-replica', last_known_generation=0,
- last_known_trans_id=None, return_doc_cb=self.receive_doc)
- self.assertTransactionLog([doc.doc_id], self.db)
- self.assertEqual(
- (doc.doc_id, doc.rev, simple_doc, 1), self.other_changes[0][:-1])
- self.assertEqual(1, new_gen)
- if self.whitebox:
- self.assertEqual(self.db._last_exchange_log['return'],
- {'last_gen': 1, 'docs': [(doc.doc_id, doc.rev)]})
-
- def test_sync_exchange_returns_deleted_docs(self):
- doc = self.db.create_doc_from_json(simple_doc)
- self.db.delete_doc(doc)
- self.assertTransactionLog([doc.doc_id, doc.doc_id], self.db)
- new_gen, _ = self.st.sync_exchange(
- [], 'other-replica', last_known_generation=0,
- last_known_trans_id=None, return_doc_cb=self.receive_doc)
- self.assertTransactionLog([doc.doc_id, doc.doc_id], self.db)
- self.assertEqual(
- (doc.doc_id, doc.rev, None, 2), self.other_changes[0][:-1])
- self.assertEqual(2, new_gen)
- if self.whitebox:
- self.assertEqual(self.db._last_exchange_log['return'],
- {'last_gen': 2, 'docs': [(doc.doc_id, doc.rev)]})
-
- def test_sync_exchange_returns_many_new_docs(self):
- doc = self.db.create_doc_from_json(simple_doc)
- doc2 = self.db.create_doc_from_json(nested_doc)
- self.assertTransactionLog([doc.doc_id, doc2.doc_id], self.db)
- new_gen, _ = self.st.sync_exchange(
- [], 'other-replica', last_known_generation=0,
- last_known_trans_id=None, return_doc_cb=self.receive_doc)
- self.assertTransactionLog([doc.doc_id, doc2.doc_id], self.db)
- self.assertEqual(2, new_gen)
- self.assertEqual(
- [(doc.doc_id, doc.rev, simple_doc, 1),
- (doc2.doc_id, doc2.rev, nested_doc, 2)],
- [c[:-1] for c in self.other_changes])
- if self.whitebox:
- self.assertEqual(
- self.db._last_exchange_log['return'],
- {'last_gen': 2, 'docs':
- [(doc.doc_id, doc.rev), (doc2.doc_id, doc2.rev)]})
-
- def test_sync_exchange_getting_newer_docs(self):
- doc = self.db.create_doc_from_json(simple_doc)
- self.assertTransactionLog([doc.doc_id], self.db)
- new_doc = '{"key": "altval"}'
- docs_by_gen = [
- (self.make_document(doc.doc_id, 'test:1|z:2', new_doc), 10,
- 'T-sid')]
- new_gen, _ = self.st.sync_exchange(
- docs_by_gen, 'other-replica', last_known_generation=0,
- last_known_trans_id=None, return_doc_cb=self.receive_doc)
- self.assertTransactionLog([doc.doc_id, doc.doc_id], self.db)
- self.assertEqual(([], 2), (self.other_changes, new_gen))
-
- def test_sync_exchange_with_concurrent_updates_of_synced_doc(self):
- expected = []
-
- def before_whatschanged_cb(state):
- if state != 'before whats_changed':
- return
- cont = '{"key": "cuncurrent"}'
- conc_rev = self.db.put_doc(
- self.make_document(doc.doc_id, 'test:1|z:2', cont))
- expected.append((doc.doc_id, conc_rev, cont, 3))
-
- self.set_trace_hook(before_whatschanged_cb)
- doc = self.db.create_doc_from_json(simple_doc)
- self.assertTransactionLog([doc.doc_id], self.db)
- new_doc = '{"key": "altval"}'
- docs_by_gen = [
- (self.make_document(doc.doc_id, 'test:1|z:2', new_doc), 10,
- 'T-sid')]
- new_gen, _ = self.st.sync_exchange(
- docs_by_gen, 'other-replica', last_known_generation=0,
- last_known_trans_id=None, return_doc_cb=self.receive_doc)
- self.assertEqual(expected, [c[:-1] for c in self.other_changes])
- self.assertEqual(3, new_gen)
-
- def test_sync_exchange_with_concurrent_updates(self):
-
- def after_whatschanged_cb(state):
- if state != 'after whats_changed':
- return
- self.db.create_doc_from_json('{"new": "doc"}')
-
- self.set_trace_hook(after_whatschanged_cb)
- doc = self.db.create_doc_from_json(simple_doc)
- self.assertTransactionLog([doc.doc_id], self.db)
- new_doc = '{"key": "altval"}'
- docs_by_gen = [
- (self.make_document(doc.doc_id, 'test:1|z:2', new_doc), 10,
- 'T-sid')]
- new_gen, _ = self.st.sync_exchange(
- docs_by_gen, 'other-replica', last_known_generation=0,
- last_known_trans_id=None, return_doc_cb=self.receive_doc)
- self.assertEqual(([], 2), (self.other_changes, new_gen))
-
- def test_sync_exchange_converged_handling(self):
- doc = self.db.create_doc_from_json(simple_doc)
- docs_by_gen = [
- (self.make_document('new', 'other:1', '{}'), 4, 'T-foo'),
- (self.make_document(doc.doc_id, doc.rev, doc.get_json()), 5,
- 'T-bar')]
- new_gen, _ = self.st.sync_exchange(
- docs_by_gen, 'other-replica', last_known_generation=0,
- last_known_trans_id=None, return_doc_cb=self.receive_doc)
- self.assertEqual(([], 2), (self.other_changes, new_gen))
-
- def test_sync_exchange_detect_incomplete_exchange(self):
- def before_get_docs_explode(state):
- if state != 'before get_docs':
- return
- raise errors.U1DBError("fail")
- self.set_trace_hook(before_get_docs_explode)
- # suppress traceback printing in the wsgiref server
- self.patch(simple_server.ServerHandler,
- 'log_exception', lambda h, exc_info: None)
- doc = self.db.create_doc_from_json(simple_doc)
- self.assertTransactionLog([doc.doc_id], self.db)
- self.assertRaises(
- (errors.U1DBError, errors.BrokenSyncStream),
- self.st.sync_exchange, [], 'other-replica',
- last_known_generation=0, last_known_trans_id=None,
- return_doc_cb=self.receive_doc)
-
- def test_sync_exchange_doc_ids(self):
- sync_exchange_doc_ids = getattr(self.st, 'sync_exchange_doc_ids', None)
- if sync_exchange_doc_ids is None:
- self.skipTest("sync_exchange_doc_ids not implemented")
- db2 = self.create_database('test2')
- doc = db2.create_doc_from_json(simple_doc)
- new_gen, trans_id = sync_exchange_doc_ids(
- db2, [(doc.doc_id, 10, 'T-sid')], 0, None,
- return_doc_cb=self.receive_doc)
- self.assertGetDoc(self.db, doc.doc_id, doc.rev, simple_doc, False)
- self.assertTransactionLog([doc.doc_id], self.db)
- last_trans_id = self.getLastTransId(self.db)
- self.assertEqual(([], 1, last_trans_id),
- (self.other_changes, new_gen, trans_id))
- self.assertEqual(10, self.st.get_sync_info(db2._replica_uid)[3])
-
- def test__set_trace_hook(self):
- called = []
-
- def cb(state):
- called.append(state)
-
- self.set_trace_hook(cb)
- self.st.sync_exchange([], 'replica', 0, None, self.receive_doc)
- self.st.record_sync_info('replica', 0, 'T-sid')
- self.assertEqual(['before whats_changed',
- 'after whats_changed',
- 'before get_docs',
- 'record_sync_info',
- ],
- called)
-
- def test__set_trace_hook_shallow(self):
- if (self.st._set_trace_hook_shallow == self.st._set_trace_hook
- or
- self.st._set_trace_hook_shallow.im_func ==
- SyncTarget._set_trace_hook_shallow.im_func):
- # shallow same as full
- expected = ['before whats_changed',
- 'after whats_changed',
- 'before get_docs',
- 'record_sync_info',
- ]
- else:
- expected = ['sync_exchange', 'record_sync_info']
-
- called = []
-
- def cb(state):
- called.append(state)
-
- self.set_trace_hook(cb, shallow=True)
- self.st.sync_exchange([], 'replica', 0, None, self.receive_doc)
- self.st.record_sync_info('replica', 0, 'T-sid')
- self.assertEqual(expected, called)
-
-
-def sync_via_synchronizer(test, db_source, db_target, trace_hook=None,
- trace_hook_shallow=None):
- target = db_target.get_sync_target()
- trace_hook = trace_hook or trace_hook_shallow
- if trace_hook:
- target._set_trace_hook(trace_hook)
- return sync.Synchronizer(db_source, target).sync()
-
-
-sync_scenarios = []
-for name, scenario in tests.LOCAL_DATABASES_SCENARIOS:
- scenario = dict(scenario)
- scenario['do_sync'] = sync_via_synchronizer
- sync_scenarios.append((name, scenario))
- scenario = dict(scenario)
-
-
-def make_database_for_http_test(test, replica_uid):
- if test.server is None:
- test.startServer()
- db = test.request_state._create_database(replica_uid)
- try:
- http_at = test._http_at
- except AttributeError:
- http_at = test._http_at = {}
- http_at[db] = replica_uid
- return db
-
-
-def copy_database_for_http_test(test, db):
- # DO NOT COPY OR REUSE THIS CODE OUTSIDE TESTS: COPYING U1DB DATABASES IS
- # THE WRONG THING TO DO, THE ONLY REASON WE DO SO HERE IS TO TEST THAT WE
- # CORRECTLY DETECT IT HAPPENING SO THAT WE CAN RAISE ERRORS RATHER THAN
- # CORRUPT USER DATA. USE SYNC INSTEAD, OR WE WILL SEND NINJA TO YOUR HOUSE.
- if test.server is None:
- test.startServer()
- new_db = test.request_state._copy_database(db)
- try:
- http_at = test._http_at
- except AttributeError:
- http_at = test._http_at = {}
- path = db._replica_uid
- while path in http_at.values():
- path += 'copy'
- http_at[new_db] = path
- return new_db
-
-
-def sync_via_synchronizer_and_http(test, db_source, db_target,
- trace_hook=None, trace_hook_shallow=None):
- if trace_hook:
- test.skipTest("full trace hook unsupported over http")
- path = test._http_at[db_target]
- target = http_target.HTTPSyncTarget.connect(test.getURL(path))
- if trace_hook_shallow:
- target._set_trace_hook_shallow(trace_hook_shallow)
- return sync.Synchronizer(db_source, target).sync()
-
-
-sync_scenarios.append(('pyhttp', {
- 'make_database_for_test': make_database_for_http_test,
- 'copy_database_for_test': copy_database_for_http_test,
- 'make_document_for_test': tests.make_document_for_test,
- 'make_app_with_state': make_http_app,
- 'do_sync': sync_via_synchronizer_and_http
-}))
-
-
-class DatabaseSyncTests(tests.DatabaseBaseTests,
- tests.TestCaseWithServer):
-
- scenarios = sync_scenarios
- do_sync = None # set by scenarios
-
- def create_database(self, replica_uid, sync_role=None):
- if replica_uid == 'test' and sync_role is None:
- # created up the chain by base class but unused
- return None
- db = self.create_database_for_role(replica_uid, sync_role)
- if sync_role:
- self._use_tracking[db] = (replica_uid, sync_role)
- return db
-
- def create_database_for_role(self, replica_uid, sync_role):
- # hook point for reuse
- return super(DatabaseSyncTests, self).create_database(replica_uid)
-
- def copy_database(self, db, sync_role=None):
- # DO NOT COPY OR REUSE THIS CODE OUTSIDE TESTS: COPYING U1DB DATABASES
- # IS THE WRONG THING TO DO, THE ONLY REASON WE DO SO HERE IS TO TEST
- # THAT WE CORRECTLY DETECT IT HAPPENING SO THAT WE CAN RAISE ERRORS
- # RATHER THAN CORRUPT USER DATA. USE SYNC INSTEAD, OR WE WILL SEND
- # NINJA TO YOUR HOUSE.
- db_copy = super(DatabaseSyncTests, self).copy_database(db)
- name, orig_sync_role = self._use_tracking[db]
- self._use_tracking[db_copy] = (name + '(copy)', sync_role
- or orig_sync_role)
- return db_copy
-
- def sync(self, db_from, db_to, trace_hook=None,
- trace_hook_shallow=None):
- from_name, from_sync_role = self._use_tracking[db_from]
- to_name, to_sync_role = self._use_tracking[db_to]
- if from_sync_role not in ('source', 'both'):
- raise Exception("%s marked for %s use but used as source" %
- (from_name, from_sync_role))
- if to_sync_role not in ('target', 'both'):
- raise Exception("%s marked for %s use but used as target" %
- (to_name, to_sync_role))
- return self.do_sync(self, db_from, db_to, trace_hook,
- trace_hook_shallow)
-
- def setUp(self):
- self._use_tracking = {}
- super(DatabaseSyncTests, self).setUp()
-
- def assertLastExchangeLog(self, db, expected):
- log = getattr(db, '_last_exchange_log', None)
- if log is None:
- return
- self.assertEqual(expected, log)
-
- def test_sync_tracks_db_generation_of_other(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- self.assertEqual(0, self.sync(self.db1, self.db2))
- self.assertEqual(
- (0, ''), self.db1._get_replica_gen_and_trans_id('test2'))
- self.assertEqual(
- (0, ''), self.db2._get_replica_gen_and_trans_id('test1'))
- self.assertLastExchangeLog(self.db2,
- {'receive':
- {'docs': [], 'last_known_gen': 0},
- 'return':
- {'docs': [], 'last_gen': 0}})
-
- def test_sync_autoresolves(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- doc1 = self.db1.create_doc_from_json(simple_doc, doc_id='doc')
- rev1 = doc1.rev
- doc2 = self.db2.create_doc_from_json(simple_doc, doc_id='doc')
- rev2 = doc2.rev
- self.sync(self.db1, self.db2)
- doc = self.db1.get_doc('doc')
- self.assertFalse(doc.has_conflicts)
- self.assertEqual(doc.rev, self.db2.get_doc('doc').rev)
- v = vectorclock.VectorClockRev(doc.rev)
- self.assertTrue(v.is_newer(vectorclock.VectorClockRev(rev1)))
- self.assertTrue(v.is_newer(vectorclock.VectorClockRev(rev2)))
-
- def test_sync_autoresolves_moar(self):
- # here we test that when a database with a conflicted document is the
- # source of a sync, the target holds a newer revision of that document,
- # and the target's content for it matches one of the source's conflicts,
- # then the source's conflict gets autoresolved and the source document's
- # revision is bumped.
- #
- # idea is as follows:
- # A          B
- # a1         -
- #   `------->
- # a1         a1
- # v          v
- # a2         a1b1
- #   `------->
- # a1b1+a2    a1b1
- #            v
- # a1b1+a2    a1b2 (a1b2 has same content as a2)
- #   `------->
- # a3b2       a1b2 (autoresolved)
- #   `------->
- # a3b2       a3b2
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- self.db1.create_doc_from_json(simple_doc, doc_id='doc')
- self.sync(self.db1, self.db2)
- for db, content in [(self.db1, '{}'), (self.db2, '{"hi": 42}')]:
- doc = db.get_doc('doc')
- doc.set_json(content)
- db.put_doc(doc)
- self.sync(self.db1, self.db2)
- # db1 and db2 now both have a doc of {hi:42}, but db1 has a conflict
- doc = self.db1.get_doc('doc')
- rev1 = doc.rev
- self.assertTrue(doc.has_conflicts)
- # set db2 to have a doc of {} (same as db1 before the conflict)
- doc = self.db2.get_doc('doc')
- doc.set_json('{}')
- self.db2.put_doc(doc)
- rev2 = doc.rev
- # sync it across
- self.sync(self.db1, self.db2)
- # tadaa!
- doc = self.db1.get_doc('doc')
- self.assertFalse(doc.has_conflicts)
- vec1 = vectorclock.VectorClockRev(rev1)
- vec2 = vectorclock.VectorClockRev(rev2)
- vec3 = vectorclock.VectorClockRev(doc.rev)
- self.assertTrue(vec3.is_newer(vec1))
- self.assertTrue(vec3.is_newer(vec2))
- # because the conflict is on the source, sync it another time
- self.sync(self.db1, self.db2)
- # make sure db2 now has the exact same thing
- self.assertEqual(self.db1.get_doc('doc'), self.db2.get_doc('doc'))
-
- def test_sync_autoresolves_moar_backwards(self):
- # here we test that when a database with a conflicted document is the
- # target of a sync, the source holds a newer revision of that document,
- # and the source's content for it matches one of the target's conflicts,
- # then the target's conflict gets autoresolved and the document's
- # revision is bumped.
- #
- # idea is as follows:
- # A          B
- # a1         -
- #   `------->
- # a1         a1
- # v          v
- # a2         a1b1
- #   `------->
- # a1b1+a2    a1b1
- #            v
- # a1b1+a2    a1b2 (a1b2 has same content as a2)
- #   <-------'
- # a3b2       a3b2 (autoresolved and propagated)
- self.db1 = self.create_database('test1', 'both')
- self.db2 = self.create_database('test2', 'both')
- self.db1.create_doc_from_json(simple_doc, doc_id='doc')
- self.sync(self.db1, self.db2)
- for db, content in [(self.db1, '{}'), (self.db2, '{"hi": 42}')]:
- doc = db.get_doc('doc')
- doc.set_json(content)
- db.put_doc(doc)
- self.sync(self.db1, self.db2)
- # db1 and db2 now both have a doc of {hi:42}, but db1 has a conflict
- doc = self.db1.get_doc('doc')
- rev1 = doc.rev
- self.assertTrue(doc.has_conflicts)
- revc = self.db1.get_doc_conflicts('doc')[-1].rev
- # set db2 to have a doc of {} (same as db1 before the conflict)
- doc = self.db2.get_doc('doc')
- doc.set_json('{}')
- self.db2.put_doc(doc)
- rev2 = doc.rev
- # sync it across
- self.sync(self.db2, self.db1)
- # tadaa!
- doc = self.db1.get_doc('doc')
- self.assertFalse(doc.has_conflicts)
- vec1 = vectorclock.VectorClockRev(rev1)
- vec2 = vectorclock.VectorClockRev(rev2)
- vec3 = vectorclock.VectorClockRev(doc.rev)
- vecc = vectorclock.VectorClockRev(revc)
- self.assertTrue(vec3.is_newer(vec1))
- self.assertTrue(vec3.is_newer(vec2))
- self.assertTrue(vec3.is_newer(vecc))
- # make sure db2 now has the exact same thing
- self.assertEqual(self.db1.get_doc('doc'), self.db2.get_doc('doc'))
-
- def test_sync_autoresolves_moar_backwards_three(self):
- # same as autoresolves_moar_backwards, but with three databases (note
- # all the syncs go in the same direction -- this is a more natural
- # scenario):
- #
- # A          B          C
- # a1         -          -
- #   `------->
- # a1         a1         -
- #              `------->
- # a1         a1         a1
- # v          v
- # a2         a1b1       a1
- #   `------------------->
- # a2         a1b1       a2
- #              `------->
- #            a2+a1b1    a2
- #                       v
- # a2         a2+a1b1    a2c1 (same as a1b1)
- #   `------------------->
- # a2c1       a2+a1b1    a2c1
- #              `------->
- # a2b2c1     a2b2c1     a2c1
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'both')
- self.db3 = self.create_database('test3', 'target')
- self.db1.create_doc_from_json(simple_doc, doc_id='doc')
- self.sync(self.db1, self.db2)
- self.sync(self.db2, self.db3)
- for db, content in [(self.db2, '{"hi": 42}'),
- (self.db1, '{}'),
- ]:
- doc = db.get_doc('doc')
- doc.set_json(content)
- db.put_doc(doc)
- self.sync(self.db1, self.db3)
- self.sync(self.db2, self.db3)
- # db2 and db3 now both have a doc of {}, but db2 has a
- # conflict
- doc = self.db2.get_doc('doc')
- self.assertTrue(doc.has_conflicts)
- revc = self.db2.get_doc_conflicts('doc')[-1].rev
- self.assertEqual('{}', doc.get_json())
- self.assertEqual(self.db3.get_doc('doc').get_json(), doc.get_json())
- self.assertEqual(self.db3.get_doc('doc').rev, doc.rev)
- # set db3 to have a doc of {hi:42} (same as db2 before the conflict)
- doc = self.db3.get_doc('doc')
- doc.set_json('{"hi": 42}')
- self.db3.put_doc(doc)
- rev3 = doc.rev
- # sync it across to db1
- self.sync(self.db1, self.db3)
- # db1 now has hi:42, with a rev that is newer than db2's doc
- doc = self.db1.get_doc('doc')
- rev1 = doc.rev
- self.assertFalse(doc.has_conflicts)
- self.assertEqual('{"hi": 42}', doc.get_json())
- VCR = vectorclock.VectorClockRev
- self.assertTrue(VCR(rev1).is_newer(VCR(self.db2.get_doc('doc').rev)))
- # so sync it to db2
- self.sync(self.db1, self.db2)
- # tadaa!
- doc = self.db2.get_doc('doc')
- self.assertFalse(doc.has_conflicts)
- # db2's revision of the document is strictly newer than db1's before
- # the sync, and db3's before that sync way back when
- self.assertTrue(VCR(doc.rev).is_newer(VCR(rev1)))
- self.assertTrue(VCR(doc.rev).is_newer(VCR(rev3)))
- self.assertTrue(VCR(doc.rev).is_newer(VCR(revc)))
- # make sure both dbs now have the exact same thing
- self.assertEqual(self.db1.get_doc('doc'), self.db2.get_doc('doc'))
-
- def test_sync_puts_changes(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- doc = self.db1.create_doc_from_json(simple_doc)
- self.assertEqual(1, self.sync(self.db1, self.db2))
- self.assertGetDoc(self.db2, doc.doc_id, doc.rev, simple_doc, False)
- self.assertEqual(1, self.db1._get_replica_gen_and_trans_id('test2')[0])
- self.assertEqual(1, self.db2._get_replica_gen_and_trans_id('test1')[0])
- self.assertLastExchangeLog(self.db2,
- {'receive':
- {'docs': [(doc.doc_id, doc.rev)],
- 'source_uid': 'test1',
- 'source_gen': 1,
- 'last_known_gen': 0},
- 'return': {'docs': [], 'last_gen': 1}})
-
- def test_sync_pulls_changes(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- doc = self.db2.create_doc_from_json(simple_doc)
- self.db1.create_index('test-idx', 'key')
- self.assertEqual(0, self.sync(self.db1, self.db2))
- self.assertGetDoc(self.db1, doc.doc_id, doc.rev, simple_doc, False)
- self.assertEqual(1, self.db1._get_replica_gen_and_trans_id('test2')[0])
- self.assertEqual(1, self.db2._get_replica_gen_and_trans_id('test1')[0])
- self.assertLastExchangeLog(self.db2,
- {'receive':
- {'docs': [], 'last_known_gen': 0},
- 'return':
- {'docs': [(doc.doc_id, doc.rev)],
- 'last_gen': 1}})
- self.assertEqual([doc], self.db1.get_from_index('test-idx', 'value'))
-
- def test_sync_pulling_doesnt_update_other_if_changed(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- doc = self.db2.create_doc_from_json(simple_doc)
- # After the local side has sent its list of docs, but before we start
- # receiving the target's response, we update the local database with a
- # new record.
- # When we finish synchronizing, we notice that something was updated
- # locally, so we cannot tell c2 our new updated generation.
-
- def before_get_docs(state):
- if state != 'before get_docs':
- return
- self.db1.create_doc_from_json(simple_doc)
-
- self.assertEqual(0, self.sync(self.db1, self.db2,
- trace_hook=before_get_docs))
- self.assertLastExchangeLog(self.db2,
- {'receive':
- {'docs': [], 'last_known_gen': 0},
- 'return':
- {'docs': [(doc.doc_id, doc.rev)],
- 'last_gen': 1}})
- self.assertEqual(1, self.db1._get_replica_gen_and_trans_id('test2')[0])
- # c2 should not have gotten a '_record_sync_info' call, because the
- # local database had been updated more than just by the messages
- # returned from c2.
- self.assertEqual(
- (0, ''), self.db2._get_replica_gen_and_trans_id('test1'))
-
- def test_sync_doesnt_update_other_if_nothing_pulled(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- self.db1.create_doc_from_json(simple_doc)
-
- def no_record_sync_info(state):
- if state != 'record_sync_info':
- return
- self.fail('SyncTarget.record_sync_info was called')
- self.assertEqual(1, self.sync(self.db1, self.db2,
- trace_hook_shallow=no_record_sync_info))
- self.assertEqual(
- 1,
- self.db2._get_replica_gen_and_trans_id(self.db1._replica_uid)[0])
-
- def test_sync_ignores_convergence(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'both')
- doc = self.db1.create_doc_from_json(simple_doc)
- self.db3 = self.create_database('test3', 'target')
- self.assertEqual(1, self.sync(self.db1, self.db3))
- self.assertEqual(0, self.sync(self.db2, self.db3))
- self.assertEqual(1, self.sync(self.db1, self.db2))
- self.assertLastExchangeLog(self.db2,
- {'receive':
- {'docs': [(doc.doc_id, doc.rev)],
- 'source_uid': 'test1',
- 'source_gen': 1, 'last_known_gen': 0},
- 'return': {'docs': [], 'last_gen': 1}})
-
- def test_sync_ignores_superseded(self):
- self.db1 = self.create_database('test1', 'both')
- self.db2 = self.create_database('test2', 'both')
- doc = self.db1.create_doc_from_json(simple_doc)
- doc_rev1 = doc.rev
- self.db3 = self.create_database('test3', 'target')
- self.sync(self.db1, self.db3)
- self.sync(self.db2, self.db3)
- new_content = '{"key": "altval"}'
- doc.set_json(new_content)
- self.db1.put_doc(doc)
- doc_rev2 = doc.rev
- self.sync(self.db2, self.db1)
- self.assertLastExchangeLog(self.db1,
- {'receive':
- {'docs': [(doc.doc_id, doc_rev1)],
- 'source_uid': 'test2',
- 'source_gen': 1, 'last_known_gen': 0},
- 'return':
- {'docs': [(doc.doc_id, doc_rev2)],
- 'last_gen': 2}})
- self.assertGetDoc(self.db1, doc.doc_id, doc_rev2, new_content, False)
-
- def test_sync_sees_remote_conflicted(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- doc1 = self.db1.create_doc_from_json(simple_doc)
- doc_id = doc1.doc_id
- doc1_rev = doc1.rev
- self.db1.create_index('test-idx', 'key')
- new_doc = '{"key": "altval"}'
- doc2 = self.db2.create_doc_from_json(new_doc, doc_id=doc_id)
- doc2_rev = doc2.rev
- self.assertTransactionLog([doc1.doc_id], self.db1)
- self.sync(self.db1, self.db2)
- self.assertLastExchangeLog(self.db2,
- {'receive':
- {'docs': [(doc_id, doc1_rev)],
- 'source_uid': 'test1',
- 'source_gen': 1, 'last_known_gen': 0},
- 'return':
- {'docs': [(doc_id, doc2_rev)],
- 'last_gen': 1}})
- self.assertTransactionLog([doc_id, doc_id], self.db1)
- self.assertGetDoc(self.db1, doc_id, doc2_rev, new_doc, True)
- self.assertGetDoc(self.db2, doc_id, doc2_rev, new_doc, False)
- from_idx = self.db1.get_from_index('test-idx', 'altval')[0]
- self.assertEqual(doc2.doc_id, from_idx.doc_id)
- self.assertEqual(doc2.rev, from_idx.rev)
- self.assertTrue(from_idx.has_conflicts)
- self.assertEqual([], self.db1.get_from_index('test-idx', 'value'))
-
- def test_sync_sees_remote_delete_conflicted(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- doc1 = self.db1.create_doc_from_json(simple_doc)
- doc_id = doc1.doc_id
- self.db1.create_index('test-idx', 'key')
- self.sync(self.db1, self.db2)
- doc2 = self.make_document(doc1.doc_id, doc1.rev, doc1.get_json())
- new_doc = '{"key": "altval"}'
- doc1.set_json(new_doc)
- self.db1.put_doc(doc1)
- self.db2.delete_doc(doc2)
- self.assertTransactionLog([doc_id, doc_id], self.db1)
- self.sync(self.db1, self.db2)
- self.assertLastExchangeLog(self.db2,
- {'receive':
- {'docs': [(doc_id, doc1.rev)],
- 'source_uid': 'test1',
- 'source_gen': 2, 'last_known_gen': 1},
- 'return': {'docs': [(doc_id, doc2.rev)],
- 'last_gen': 2}})
- self.assertTransactionLog([doc_id, doc_id, doc_id], self.db1)
- self.assertGetDocIncludeDeleted(self.db1, doc_id, doc2.rev, None, True)
- self.assertGetDocIncludeDeleted(
- self.db2, doc_id, doc2.rev, None, False)
- self.assertEqual([], self.db1.get_from_index('test-idx', 'value'))
-
- def test_sync_local_race_conflicted(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- doc = self.db1.create_doc_from_json(simple_doc)
- doc_id = doc.doc_id
- doc1_rev = doc.rev
- self.db1.create_index('test-idx', 'key')
- self.sync(self.db1, self.db2)
- content1 = '{"key": "localval"}'
- content2 = '{"key": "altval"}'
- doc.set_json(content2)
- self.db2.put_doc(doc)
- doc2_rev2 = doc.rev
- triggered = []
-
- def after_whatschanged(state):
- if state != 'after whats_changed':
- return
- triggered.append(True)
- doc = self.make_document(doc_id, doc1_rev, content1)
- self.db1.put_doc(doc)
-
- self.sync(self.db1, self.db2, trace_hook=after_whatschanged)
- self.assertEqual([True], triggered)
- self.assertGetDoc(self.db1, doc_id, doc2_rev2, content2, True)
- from_idx = self.db1.get_from_index('test-idx', 'altval')[0]
- self.assertEqual(doc.doc_id, from_idx.doc_id)
- self.assertEqual(doc.rev, from_idx.rev)
- self.assertTrue(from_idx.has_conflicts)
- self.assertEqual([], self.db1.get_from_index('test-idx', 'value'))
- self.assertEqual([], self.db1.get_from_index('test-idx', 'localval'))
-
- def test_sync_propagates_deletes(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'both')
- doc1 = self.db1.create_doc_from_json(simple_doc)
- doc_id = doc1.doc_id
- self.db1.create_index('test-idx', 'key')
- self.sync(self.db1, self.db2)
- self.db2.create_index('test-idx', 'key')
- self.db3 = self.create_database('test3', 'target')
- self.sync(self.db1, self.db3)
- self.db1.delete_doc(doc1)
- deleted_rev = doc1.rev
- self.sync(self.db1, self.db2)
- self.assertLastExchangeLog(self.db2,
- {'receive':
- {'docs': [(doc_id, deleted_rev)],
- 'source_uid': 'test1',
- 'source_gen': 2, 'last_known_gen': 1},
- 'return': {'docs': [], 'last_gen': 2}})
- self.assertGetDocIncludeDeleted(
- self.db1, doc_id, deleted_rev, None, False)
- self.assertGetDocIncludeDeleted(
- self.db2, doc_id, deleted_rev, None, False)
- self.assertEqual([], self.db1.get_from_index('test-idx', 'value'))
- self.assertEqual([], self.db2.get_from_index('test-idx', 'value'))
- self.sync(self.db2, self.db3)
- self.assertLastExchangeLog(self.db3,
- {'receive':
- {'docs': [(doc_id, deleted_rev)],
- 'source_uid': 'test2',
- 'source_gen': 2,
- 'last_known_gen': 0},
- 'return':
- {'docs': [], 'last_gen': 2}})
- self.assertGetDocIncludeDeleted(
- self.db3, doc_id, deleted_rev, None, False)
-
- def test_sync_propagates_resolution(self):
- self.db1 = self.create_database('test1', 'both')
- self.db2 = self.create_database('test2', 'both')
- doc1 = self.db1.create_doc_from_json('{"a": 1}', doc_id='the-doc')
- db3 = self.create_database('test3', 'both')
- self.sync(self.db2, self.db1)
- self.assertEqual(
- self.db1._get_generation_info(),
- self.db2._get_replica_gen_and_trans_id(self.db1._replica_uid))
- self.assertEqual(
- self.db2._get_generation_info(),
- self.db1._get_replica_gen_and_trans_id(self.db2._replica_uid))
- self.sync(db3, self.db1)
- # update on 2
- doc2 = self.make_document('the-doc', doc1.rev, '{"a": 2}')
- self.db2.put_doc(doc2)
- self.sync(self.db2, db3)
- self.assertEqual(db3.get_doc('the-doc').rev, doc2.rev)
- # update on 1
- doc1.set_json('{"a": 3}')
- self.db1.put_doc(doc1)
- # conflicts
- self.sync(self.db2, self.db1)
- self.sync(db3, self.db1)
- self.assertTrue(self.db2.get_doc('the-doc').has_conflicts)
- self.assertTrue(db3.get_doc('the-doc').has_conflicts)
- # resolve
- conflicts = self.db2.get_doc_conflicts('the-doc')
- doc4 = self.make_document('the-doc', None, '{"a": 4}')
- revs = [doc.rev for doc in conflicts]
- self.db2.resolve_doc(doc4, revs)
- doc2 = self.db2.get_doc('the-doc')
- self.assertEqual(doc4.get_json(), doc2.get_json())
- self.assertFalse(doc2.has_conflicts)
- self.sync(self.db2, db3)
- doc3 = db3.get_doc('the-doc')
- self.assertEqual(doc4.get_json(), doc3.get_json())
- self.assertFalse(doc3.has_conflicts)
-
- def test_sync_supersedes_conflicts(self):
- self.db1 = self.create_database('test1', 'both')
- self.db2 = self.create_database('test2', 'target')
- db3 = self.create_database('test3', 'both')
- doc1 = self.db1.create_doc_from_json('{"a": 1}', doc_id='the-doc')
- self.db2.create_doc_from_json('{"b": 1}', doc_id='the-doc')
- db3.create_doc_from_json('{"c": 1}', doc_id='the-doc')
- self.sync(db3, self.db1)
- self.assertEqual(
- self.db1._get_generation_info(),
- db3._get_replica_gen_and_trans_id(self.db1._replica_uid))
- self.assertEqual(
- db3._get_generation_info(),
- self.db1._get_replica_gen_and_trans_id(db3._replica_uid))
- self.sync(db3, self.db2)
- self.assertEqual(
- self.db2._get_generation_info(),
- db3._get_replica_gen_and_trans_id(self.db2._replica_uid))
- self.assertEqual(
- db3._get_generation_info(),
- self.db2._get_replica_gen_and_trans_id(db3._replica_uid))
- self.assertEqual(3, len(db3.get_doc_conflicts('the-doc')))
- doc1.set_json('{"a": 2}')
- self.db1.put_doc(doc1)
- self.sync(db3, self.db1)
- # original doc1 should have been removed from conflicts
- self.assertEqual(3, len(db3.get_doc_conflicts('the-doc')))
-
- def test_sync_stops_after_get_sync_info(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- self.db1.create_doc_from_json(tests.simple_doc)
- self.sync(self.db1, self.db2)
-
- def put_hook(state):
- self.fail("Tracehook triggered for %s" % (state,))
-
- self.sync(self.db1, self.db2, trace_hook_shallow=put_hook)
-
- def test_sync_detects_rollback_in_source(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- self.db1.create_doc_from_json(tests.simple_doc, doc_id='doc1')
- self.sync(self.db1, self.db2)
- db1_copy = self.copy_database(self.db1)
- self.db1.create_doc_from_json(tests.simple_doc, doc_id='doc2')
- self.sync(self.db1, self.db2)
- self.assertRaises(
- errors.InvalidGeneration, self.sync, db1_copy, self.db2)
-
- def test_sync_detects_rollback_in_target(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- self.db1.create_doc_from_json(tests.simple_doc, doc_id="divergent")
- self.sync(self.db1, self.db2)
- db2_copy = self.copy_database(self.db2)
- self.db2.create_doc_from_json(tests.simple_doc, doc_id='doc2')
- self.sync(self.db1, self.db2)
- self.assertRaises(
- errors.InvalidGeneration, self.sync, self.db1, db2_copy)
-
- def test_sync_detects_diverged_source(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- db3 = self.copy_database(self.db1)
- self.db1.create_doc_from_json(tests.simple_doc, doc_id="divergent")
- db3.create_doc_from_json(tests.simple_doc, doc_id="divergent")
- self.sync(self.db1, self.db2)
- self.assertRaises(
- errors.InvalidTransactionId, self.sync, db3, self.db2)
-
- def test_sync_detects_diverged_target(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- db3 = self.copy_database(self.db2)
- db3.create_doc_from_json(tests.nested_doc, doc_id="divergent")
- self.db1.create_doc_from_json(tests.simple_doc, doc_id="divergent")
- self.sync(self.db1, self.db2)
- self.assertRaises(
- errors.InvalidTransactionId, self.sync, self.db1, db3)
-
- def test_sync_detects_rollback_and_divergence_in_source(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- self.db1.create_doc_from_json(tests.simple_doc, doc_id='doc1')
- self.sync(self.db1, self.db2)
- db1_copy = self.copy_database(self.db1)
- self.db1.create_doc_from_json(tests.simple_doc, doc_id='doc2')
- self.db1.create_doc_from_json(tests.simple_doc, doc_id='doc3')
- self.sync(self.db1, self.db2)
- db1_copy.create_doc_from_json(tests.simple_doc, doc_id='doc2')
- db1_copy.create_doc_from_json(tests.simple_doc, doc_id='doc3')
- self.assertRaises(
- errors.InvalidTransactionId, self.sync, db1_copy, self.db2)
-
- def test_sync_detects_rollback_and_divergence_in_target(self):
- self.db1 = self.create_database('test1', 'source')
- self.db2 = self.create_database('test2', 'target')
- self.db1.create_doc_from_json(tests.simple_doc, doc_id="divergent")
- self.sync(self.db1, self.db2)
- db2_copy = self.copy_database(self.db2)
- self.db2.create_doc_from_json(tests.simple_doc, doc_id='doc2')
- self.db2.create_doc_from_json(tests.simple_doc, doc_id='doc3')
- self.sync(self.db1, self.db2)
- db2_copy.create_doc_from_json(tests.simple_doc, doc_id='doc2')
- db2_copy.create_doc_from_json(tests.simple_doc, doc_id='doc3')
- self.assertRaises(
- errors.InvalidTransactionId, self.sync, self.db1, db2_copy)
-
-
-class TestDbSync(tests.TestCaseWithServer):
- """Test db.sync remote sync shortcut"""
-
- scenarios = [
- ('py-http', {
- 'make_app_with_state': make_http_app,
- 'make_database_for_test': tests.make_memory_database_for_test,
- }),
- ('py-oauth-http', {
- 'make_app_with_state': make_oauth_http_app,
- 'make_database_for_test': tests.make_memory_database_for_test,
- 'oauth': True
- }),
- ]
-
- oauth = False
-
- def do_sync(self, target_name):
- if self.oauth:
- path = '~/' + target_name
- extra = dict(creds={'oauth': {
- 'consumer_key': tests.consumer1.key,
- 'consumer_secret': tests.consumer1.secret,
- 'token_key': tests.token1.key,
- 'token_secret': tests.token1.secret,
- }})
- else:
- path = target_name
- extra = {}
- target_url = self.getURL(path)
- return self.db.sync(target_url, **extra)
-
- def setUp(self):
- super(TestDbSync, self).setUp()
- self.startServer()
- self.db = self.make_database_for_test(self, 'test1')
- self.db2 = self.request_state._create_database('test2.db')
-
- def test_db_sync(self):
- doc1 = self.db.create_doc_from_json(tests.simple_doc)
- doc2 = self.db2.create_doc_from_json(tests.nested_doc)
- local_gen_before_sync = self.do_sync('test2.db')
- gen, _, changes = self.db.whats_changed(local_gen_before_sync)
- self.assertEqual(1, len(changes))
- self.assertEqual(doc2.doc_id, changes[0][0])
- self.assertEqual(1, gen - local_gen_before_sync)
- self.assertGetDoc(self.db2, doc1.doc_id, doc1.rev, tests.simple_doc,
- False)
- self.assertGetDoc(self.db, doc2.doc_id, doc2.rev, tests.nested_doc,
- False)
-
- def test_db_sync_autocreate(self):
- doc1 = self.db.create_doc_from_json(tests.simple_doc)
- local_gen_before_sync = self.do_sync('test3.db')
- gen, _, changes = self.db.whats_changed(local_gen_before_sync)
- self.assertEqual(0, gen - local_gen_before_sync)
- db3 = self.request_state.open_database('test3.db')
- gen, _, changes = db3.whats_changed()
- self.assertEqual(1, len(changes))
- self.assertEqual(doc1.doc_id, changes[0][0])
- self.assertGetDoc(db3, doc1.doc_id, doc1.rev, tests.simple_doc,
- False)
- t_gen, _ = self.db._get_replica_gen_and_trans_id('test3.db')
- s_gen, _ = db3._get_replica_gen_and_trans_id('test1')
- self.assertEqual(1, t_gen)
- self.assertEqual(1, s_gen)
-
-
-class TestRemoteSyncIntegration(tests.TestCaseWithServer):
- """Integration tests for the most common sync scenario local -> remote"""
-
- make_app_with_state = staticmethod(make_http_app)
-
- def setUp(self):
- super(TestRemoteSyncIntegration, self).setUp()
- self.startServer()
- self.db1 = inmemory.InMemoryDatabase('test1')
- self.db2 = self.request_state._create_database('test2')
-
- def test_sync_tracks_generations_incrementally(self):
- doc11 = self.db1.create_doc_from_json('{"a": 1}')
- doc12 = self.db1.create_doc_from_json('{"a": 2}')
- doc21 = self.db2.create_doc_from_json('{"b": 1}')
- doc22 = self.db2.create_doc_from_json('{"b": 2}')
- # sanity
- self.assertEqual(2, len(self.db1._get_transaction_log()))
- self.assertEqual(2, len(self.db2._get_transaction_log()))
- progress1 = []
- progress2 = []
- _do_set_replica_gen_and_trans_id = \
- self.db1._do_set_replica_gen_and_trans_id
-
- def set_sync_generation_witness1(other_uid, other_gen, trans_id):
- progress1.append((other_uid, other_gen,
- [d for d, t in
- self.db1._get_transaction_log()[2:]]))
- _do_set_replica_gen_and_trans_id(other_uid, other_gen, trans_id)
- self.patch(self.db1, '_do_set_replica_gen_and_trans_id',
- set_sync_generation_witness1)
- _do_set_replica_gen_and_trans_id2 = \
- self.db2._do_set_replica_gen_and_trans_id
-
- def set_sync_generation_witness2(other_uid, other_gen, trans_id):
- progress2.append((other_uid, other_gen,
- [d for d, t in
- self.db2._get_transaction_log()[2:]]))
- _do_set_replica_gen_and_trans_id2(other_uid, other_gen, trans_id)
- self.patch(self.db2, '_do_set_replica_gen_and_trans_id',
- set_sync_generation_witness2)
-
- db2_url = self.getURL('test2')
- self.db1.sync(db2_url)
-
- self.assertEqual([('test2', 1, [doc21.doc_id]),
- ('test2', 2, [doc21.doc_id, doc22.doc_id]),
- ('test2', 4, [doc21.doc_id, doc22.doc_id])],
- progress1)
- self.assertEqual([('test1', 1, [doc11.doc_id]),
- ('test1', 2, [doc11.doc_id, doc12.doc_id]),
- ('test1', 4, [doc11.doc_id, doc12.doc_id])],
- progress2)
-
-
-load_tests = tests.load_with_scenarios
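
The deleted test_sync.py above drives u1db's Synchronizer against local, HTTP and OAuth sync targets. As a minimal sketch of the local path those tests exercise (assuming the upstream u1db package is installed; replica names and document content below are illustrative only):

from u1db import sync
from u1db.backends import inmemory

# Two local replicas; the tests above use the same InMemoryDatabase backend.
source = inmemory.InMemoryDatabase('source-replica')
target = inmemory.InMemoryDatabase('target-replica')

doc = source.create_doc_from_json('{"key": "value"}')

# Mirrors sync_via_synchronizer(): wrap the target's SyncTarget in a
# Synchronizer and run one exchange from source to target.
sync.Synchronizer(source, target.get_sync_target()).sync()

# The document created on the source is now retrievable from the target,
# just as assertGetDoc checks in the tests above.
assert target.get_doc(doc.doc_id).get_json() == '{"key": "value"}'
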
diff --git a/src/leap/soledad/tests/u1db_tests/testing-certs/Makefile b/src/leap/soledad/tests/u1db_tests/testing-certs/Makefile
deleted file mode 100644
index 2385e75b..00000000
--- a/src/leap/soledad/tests/u1db_tests/testing-certs/Makefile
+++ /dev/null
@@ -1,35 +0,0 @@
-CATOP=./demoCA
-ORIG_CONF=/usr/lib/ssl/openssl.cnf
-ELEVEN_YEARS=-days 4015
-
-init:
- cp $(ORIG_CONF) ca.conf
- install -d $(CATOP)
- install -d $(CATOP)/certs
- install -d $(CATOP)/crl
- install -d $(CATOP)/newcerts
- install -d $(CATOP)/private
- touch $(CATOP)/index.txt
- echo 01>$(CATOP)/crlnumber
- @echo '**** Making CA certificate ...'
- openssl req -nodes -new \
- -newkey rsa -keyout $(CATOP)/private/cakey.pem \
- -out $(CATOP)/careq.pem \
- -multivalue-rdn \
- -subj "/C=UK/ST=-/O=u1db LOCAL TESTING ONLY, DO NO TRUST/CN=u1db testing CA"
- openssl ca -config ./ca.conf -create_serial \
- -out $(CATOP)/cacert.pem $(ELEVEN_YEARS) -batch \
- -keyfile $(CATOP)/private/cakey.pem -selfsign \
- -extensions v3_ca -infiles $(CATOP)/careq.pem
-
-pems:
- cp ./demoCA/cacert.pem .
- openssl req -new -config ca.conf \
- -multivalue-rdn \
- -subj "/O=u1db LOCAL TESTING ONLY, DO NOT TRUST/CN=localhost" \
- -nodes -keyout testing.key -out newreq.pem $(ELEVEN_YEARS)
- openssl ca -batch -config ./ca.conf $(ELEVEN_YEARS) \
- -policy policy_anything \
- -out testing.cert -infiles newreq.pem
-
-.PHONY: init pems
diff --git a/src/leap/soledad/tests/u1db_tests/testing-certs/cacert.pem b/src/leap/soledad/tests/u1db_tests/testing-certs/cacert.pem
deleted file mode 100644
index c019a730..00000000
--- a/src/leap/soledad/tests/u1db_tests/testing-certs/cacert.pem
+++ /dev/null
@@ -1,58 +0,0 @@
-Certificate:
- Data:
- Version: 3 (0x2)
- Serial Number:
- e4:de:01:76:c4:78:78:7e
- Signature Algorithm: sha1WithRSAEncryption
- Issuer: C=UK, ST=-, O=u1db LOCAL TESTING ONLY, DO NO TRUST, CN=u1db testing CA
- Validity
- Not Before: May 3 11:11:11 2012 GMT
- Not After : May 1 11:11:11 2023 GMT
- Subject: C=UK, ST=-, O=u1db LOCAL TESTING ONLY, DO NO TRUST, CN=u1db testing CA
- Subject Public Key Info:
- Public Key Algorithm: rsaEncryption
- Public-Key: (1024 bit)
- Modulus:
- 00:bc:91:a5:7f:7d:37:f7:06:c7:db:5b:83:6a:6b:
- 63:c3:8b:5c:f7:84:4d:97:6d:d4:be:bf:e7:79:a8:
- c1:03:57:ec:90:d4:20:e7:02:95:d9:a6:49:e3:f9:
- 9a:ea:37:b9:b2:02:62:ab:40:d3:42:bb:4a:4e:a2:
- 47:71:0f:1d:a2:c5:94:a1:cf:35:d3:23:32:42:c0:
- 1e:8d:cb:08:58:fb:8a:5c:3e:ea:eb:d5:2c:ed:d6:
- aa:09:b4:b5:7d:e3:45:c9:ae:c2:82:b2:ae:c0:81:
- bc:24:06:65:a9:e7:e0:61:ac:25:ee:53:d3:d7:be:
- 22:f7:00:a2:ad:c6:0e:3a:39
- Exponent: 65537 (0x10001)
- X509v3 extensions:
- X509v3 Subject Key Identifier:
- DB:3D:93:51:6C:32:15:54:8F:10:50:FC:49:4F:36:15:28:BB:95:6D
- X509v3 Authority Key Identifier:
- keyid:DB:3D:93:51:6C:32:15:54:8F:10:50:FC:49:4F:36:15:28:BB:95:6D
-
- X509v3 Basic Constraints:
- CA:TRUE
- Signature Algorithm: sha1WithRSAEncryption
- 72:9b:c1:f7:07:65:83:36:25:4e:01:2f:b7:4a:f2:a4:00:28:
- 80:c7:56:2c:32:39:90:13:61:4b:bb:12:c5:44:9d:42:57:85:
- 28:19:70:69:e1:43:c8:bd:11:f6:94:df:91:2d:c3:ea:82:8d:
- b4:8f:5d:47:a3:00:99:53:29:93:27:6c:c5:da:c1:20:6f:ab:
- ec:4a:be:34:f3:8f:02:e5:0c:c0:03:ac:2b:33:41:71:4f:0a:
- 72:5a:b4:26:1a:7f:81:bc:c0:95:8a:06:87:a8:11:9f:5c:73:
- 38:df:5a:69:40:21:29:ad:46:23:56:75:e1:e9:8b:10:18:4c:
- 7b:54
------BEGIN CERTIFICATE-----
-MIICkjCCAfugAwIBAgIJAOTeAXbEeHh+MA0GCSqGSIb3DQEBBQUAMGIxCzAJBgNV
-BAYTAlVLMQowCAYDVQQIDAEtMS0wKwYDVQQKDCR1MWRiIExPQ0FMIFRFU1RJTkcg
-T05MWSwgRE8gTk8gVFJVU1QxGDAWBgNVBAMMD3UxZGIgdGVzdGluZyBDQTAeFw0x
-MjA1MDMxMTExMTFaFw0yMzA1MDExMTExMTFaMGIxCzAJBgNVBAYTAlVLMQowCAYD
-VQQIDAEtMS0wKwYDVQQKDCR1MWRiIExPQ0FMIFRFU1RJTkcgT05MWSwgRE8gTk8g
-VFJVU1QxGDAWBgNVBAMMD3UxZGIgdGVzdGluZyBDQTCBnzANBgkqhkiG9w0BAQEF
-AAOBjQAwgYkCgYEAvJGlf3039wbH21uDamtjw4tc94RNl23Uvr/neajBA1fskNQg
-5wKV2aZJ4/ma6je5sgJiq0DTQrtKTqJHcQ8dosWUoc810yMyQsAejcsIWPuKXD7q
-69Us7daqCbS1feNFya7CgrKuwIG8JAZlqefgYawl7lPT174i9wCircYOOjkCAwEA
-AaNQME4wHQYDVR0OBBYEFNs9k1FsMhVUjxBQ/ElPNhUou5VtMB8GA1UdIwQYMBaA
-FNs9k1FsMhVUjxBQ/ElPNhUou5VtMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEF
-BQADgYEAcpvB9wdlgzYlTgEvt0rypAAogMdWLDI5kBNhS7sSxUSdQleFKBlwaeFD
-yL0R9pTfkS3D6oKNtI9dR6MAmVMpkydsxdrBIG+r7Eq+NPOPAuUMwAOsKzNBcU8K
-clq0Jhp/gbzAlYoGh6gRn1xzON9aaUAhKa1GI1Z14emLEBhMe1Q=
------END CERTIFICATE-----
diff --git a/src/leap/soledad/tests/u1db_tests/testing-certs/testing.cert b/src/leap/soledad/tests/u1db_tests/testing-certs/testing.cert
deleted file mode 100644
index 985684fb..00000000
--- a/src/leap/soledad/tests/u1db_tests/testing-certs/testing.cert
+++ /dev/null
@@ -1,61 +0,0 @@
-Certificate:
- Data:
- Version: 3 (0x2)
- Serial Number:
- e4:de:01:76:c4:78:78:7f
- Signature Algorithm: sha1WithRSAEncryption
- Issuer: C=UK, ST=-, O=u1db LOCAL TESTING ONLY, DO NO TRUST, CN=u1db testing CA
- Validity
- Not Before: May 3 11:11:14 2012 GMT
- Not After : May 1 11:11:14 2023 GMT
- Subject: O=u1db LOCAL TESTING ONLY, DO NOT TRUST, CN=localhost
- Subject Public Key Info:
- Public Key Algorithm: rsaEncryption
- Public-Key: (1024 bit)
- Modulus:
- 00:c6:1d:72:d3:c5:e4:fc:d1:4c:d9:e4:08:3e:90:
- 10:ce:3f:1f:87:4a:1d:4f:7f:2a:5a:52:c9:65:4f:
- d9:2c:bf:69:75:18:1a:b5:c9:09:32:00:47:f5:60:
- aa:c6:dd:3a:87:37:5f:16:be:de:29:b5:ea:fc:41:
- 7e:eb:77:bb:df:63:c3:06:1e:ed:e9:a0:67:1a:f1:
- ec:e1:9d:f7:9c:8f:1c:fa:c3:66:7b:39:dc:70:ae:
- 09:1b:9c:c0:9a:c4:90:77:45:8e:39:95:a9:2f:92:
- 43:bd:27:07:5a:99:51:6e:76:a0:af:dd:b1:2c:8f:
- ca:8b:8c:47:0d:f6:6e:fc:69
- Exponent: 65537 (0x10001)
- X509v3 extensions:
- X509v3 Basic Constraints:
- CA:FALSE
- Netscape Comment:
- OpenSSL Generated Certificate
- X509v3 Subject Key Identifier:
- 1C:63:85:E1:1D:F3:89:2E:6C:4E:3F:FB:D0:10:64:5A:C1:22:6A:2A
- X509v3 Authority Key Identifier:
- keyid:DB:3D:93:51:6C:32:15:54:8F:10:50:FC:49:4F:36:15:28:BB:95:6D
-
- Signature Algorithm: sha1WithRSAEncryption
- 1d:6d:3e:bd:93:fd:bd:3e:17:b8:9f:f0:99:7f:db:50:5c:b2:
- 01:42:03:b5:d5:94:05:d3:f6:8e:80:82:55:47:1f:58:f2:18:
- 6c:ab:ef:43:2c:2f:10:e1:7c:c4:5c:cc:ac:50:50:22:42:aa:
- 35:33:f5:b9:f3:a6:66:55:d9:36:f4:f2:e4:d4:d9:b5:2c:52:
- 66:d4:21:17:97:22:b8:9b:d7:0e:7c:3d:ce:85:19:ca:c4:d2:
- 58:62:31:c6:18:3e:44:fc:f4:30:b6:95:87:ee:21:4a:08:f0:
- af:3c:8f:c4:ba:5e:a1:5c:37:1a:7d:7b:fe:66:ae:62:50:17:
- 31:ca
------BEGIN CERTIFICATE-----
-MIICnzCCAgigAwIBAgIJAOTeAXbEeHh/MA0GCSqGSIb3DQEBBQUAMGIxCzAJBgNV
-BAYTAlVLMQowCAYDVQQIDAEtMS0wKwYDVQQKDCR1MWRiIExPQ0FMIFRFU1RJTkcg
-T05MWSwgRE8gTk8gVFJVU1QxGDAWBgNVBAMMD3UxZGIgdGVzdGluZyBDQTAeFw0x
-MjA1MDMxMTExMTRaFw0yMzA1MDExMTExMTRaMEQxLjAsBgNVBAoMJXUxZGIgTE9D
-QUwgVEVTVElORyBPTkxZLCBETyBOT1QgVFJVU1QxEjAQBgNVBAMMCWxvY2FsaG9z
-dDCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAxh1y08Xk/NFM2eQIPpAQzj8f
-h0odT38qWlLJZU/ZLL9pdRgatckJMgBH9WCqxt06hzdfFr7eKbXq/EF+63e732PD
-Bh7t6aBnGvHs4Z33nI8c+sNmeznccK4JG5zAmsSQd0WOOZWpL5JDvScHWplRbnag
-r92xLI/Ki4xHDfZu/GkCAwEAAaN7MHkwCQYDVR0TBAIwADAsBglghkgBhvhCAQ0E
-HxYdT3BlblNTTCBHZW5lcmF0ZWQgQ2VydGlmaWNhdGUwHQYDVR0OBBYEFBxjheEd
-84kubE4/+9AQZFrBImoqMB8GA1UdIwQYMBaAFNs9k1FsMhVUjxBQ/ElPNhUou5Vt
-MA0GCSqGSIb3DQEBBQUAA4GBAB1tPr2T/b0+F7if8Jl/21BcsgFCA7XVlAXT9o6A
-glVHH1jyGGyr70MsLxDhfMRczKxQUCJCqjUz9bnzpmZV2Tb08uTU2bUsUmbUIReX
-Irib1w58Pc6FGcrE0lhiMcYYPkT89DC2lYfuIUoI8K88j8S6XqFcNxp9e/5mrmJQ
-FzHK
------END CERTIFICATE-----
diff --git a/src/leap/soledad/tests/u1db_tests/testing-certs/testing.key b/src/leap/soledad/tests/u1db_tests/testing-certs/testing.key
deleted file mode 100644
index d83d4920..00000000
--- a/src/leap/soledad/tests/u1db_tests/testing-certs/testing.key
+++ /dev/null
@@ -1,16 +0,0 @@
------BEGIN PRIVATE KEY-----
-MIICdgIBADANBgkqhkiG9w0BAQEFAASCAmAwggJcAgEAAoGBAMYdctPF5PzRTNnk
-CD6QEM4/H4dKHU9/KlpSyWVP2Sy/aXUYGrXJCTIAR/VgqsbdOoc3Xxa+3im16vxB
-fut3u99jwwYe7emgZxrx7OGd95yPHPrDZns53HCuCRucwJrEkHdFjjmVqS+SQ70n
-B1qZUW52oK/dsSyPyouMRw32bvxpAgMBAAECgYBs3lXxhjg1rhabTjIxnx19GTcM
-M3Az9V+izweZQu3HJ1CeZiaXauhAr+LbNsniCkRVddotN6oCJdQB10QVxXBZc9Jz
-HPJ4zxtZfRZlNMTMmG7eLWrfxpgWnb/BUjDb40yy1nhr9yhDUnI/8RoHDRHnAEHZ
-/CnHGUrqcVcrY5zJAQJBAPLhBJg9W88JVmcOKdWxRgs7dLHnZb999Kv1V5mczmAi
-jvGvbUmucqOqke6pTUHNYyNHqU6pySzGUi2cH+BAkFECQQDQ0VoAOysg6FVoT15v
-tGh57t5sTiCZZ7PS8jwvtThsgA+vcf6c16XWzXgjGXSap4r2QDOY2rI5lsWLaQ8T
-+fyZAkAfyFJRmbXp4c7srW3MCOahkaYzoZQu+syJtBFCiMJ40gzik5I5khpuUGPI
-V19EvRu8AiSlppIsycb3MPb64XgBAkEAy7DrUf5le5wmc7G4NM6OeyJ+5LbxJbL6
-vnJ8My1a9LuWkVVpQCU7J+UVo2dZTuLPspW9vwTVhUeFOxAoHRxlQQJAFem93f7m
-el2BkB2EFqU3onPejkZ5UrDmfmeOQR1axMQNSXqSxcJxqa16Ru1BWV2gcWRbwajQ
-oc+kuJThu/r/Ug==
------END PRIVATE KEY-----
diff --git a/src/leap/soledad/util.py b/src/leap/soledad/util.py
deleted file mode 100644
index 4bc4d2c9..00000000
--- a/src/leap/soledad/util.py
+++ /dev/null
@@ -1,55 +0,0 @@
-import os
-import gnupg
-import re
-
-
-class GPGWrapper(gnupg.GPG):
- """
- This is a temporary class for handling GPG requests, and should be
- replaced by a more general class used throughout the project.
- """
-
- GNUPG_HOME = os.environ['HOME'] + "/.config/leap/gnupg"
- GNUPG_BINARY = "/usr/bin/gpg" # this has to be changed based on OS
-
- def __init__(self, gpghome=GNUPG_HOME, gpgbinary=GNUPG_BINARY):
- super(GPGWrapper, self).__init__(gnupghome=gpghome,
- gpgbinary=gpgbinary)
-
- def find_key(self, email):
- """
- Find user's key based on their email.
- """
- for key in self.list_keys():
- for uid in key['uids']:
- if re.search(email, uid):
- return key
- raise LookupError("GnuPG public key for %s not found!" % email)
-
- def encrypt(self, data, recipient, sign=None, always_trust=True,
- passphrase=None, symmetric=False):
- # TODO: devise a way so we don't need to "always trust".
- return super(GPGWrapper, self).encrypt(data, recipient, sign=sign,
- always_trust=always_trust,
- passphrase=passphrase,
- symmetric=symmetric)
-
- def decrypt(self, data, always_trust=True, passphrase=None):
- # TODO: devise a way so we don't need to "always trust".
- return super(GPGWrapper, self).decrypt(data,
- always_trust=always_trust,
- passphrase=passphrase)
-
- def send_keys(self, keyserver, *keyids):
- """
- Send keys to a keyserver
- """
- result = self.result_map['list'](self)
- gnupg.logger.debug('send_keys: %r', keyids)
- data = gnupg._make_binary_stream("", self.encoding)
- args = ['--keyserver', keyserver, '--send-keys']
- args.extend(keyids)
- self._handle_io(args, data, result, binary=True)
- gnupg.logger.debug('send_keys result: %r', result.__dict__)
- data.close()
- return result
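
The GPGWrapper removed above is a thin layer over python-gnupg. A hypothetical usage sketch, assuming gnupg is importable, /usr/bin/gpg exists, and the keyring under the chosen home directory already contains a key whose uid matches the address used here (home directory, binary path, address and passphrase are all illustrative assumptions):

from leap.soledad.util import GPGWrapper

gpg = GPGWrapper(gpghome='/tmp/leap-test-gnupg', gpgbinary='/usr/bin/gpg')

# find_key() scans list_keys() uids and raises LookupError on no match.
key = gpg.find_key('user@leap.se')

# encrypt()/decrypt() simply forward to python-gnupg with always_trust=True,
# as the class's own TODO comments note.
cipher = gpg.encrypt('secret payload', key['fingerprint'])
plain = gpg.decrypt(str(cipher), passphrase='test-passphrase')
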
diff --git a/src/leap/testing/__init__.py b/src/leap/testing/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/src/leap/testing/__init__.py
+++ /dev/null
diff --git a/src/leap/testing/basetest.py b/src/leap/testing/basetest.py
deleted file mode 100644
index 3186e1eb..00000000
--- a/src/leap/testing/basetest.py
+++ /dev/null
@@ -1,85 +0,0 @@
-import os
-import platform
-import shutil
-import tempfile
-
-try:
- import unittest2 as unittest
-except ImportError:
- import unittest
-
-from leap.base.config import get_username, get_groupname
-from leap.util.fileutil import mkdir_p, check_and_fix_urw_only
-
-_system = platform.system()
-
-
-class BaseLeapTest(unittest.TestCase):
-
- __name__ = "leap_test"
-
- @classmethod
- def setUpClass(cls):
- cls.old_path = os.environ['PATH']
- cls.old_home = os.environ['HOME']
- cls.tempdir = tempfile.mkdtemp(prefix="leap_tests-")
- cls.home = cls.tempdir
- bin_tdir = os.path.join(
- cls.tempdir,
- 'bin')
- os.environ["PATH"] = bin_tdir
- os.environ["HOME"] = cls.tempdir
-
- @classmethod
- def tearDownClass(cls):
- os.environ["PATH"] = cls.old_path
- os.environ["HOME"] = cls.old_home
- # safety check
- assert cls.tempdir.startswith('/tmp/leap_tests-')
- shutil.rmtree(cls.tempdir)
-
-    # subclasses have to override these methods;
-    # making them abstract ensures we do not put anything
-    # here that a subclass could forget to call.
-
- def setUp(self):
- raise NotImplementedError("abstract base class")
-
- def tearDown(self):
- raise NotImplementedError("abstract base class")
-
- #
- # helper methods
- #
-
- def get_tempfile(self, filename):
- return os.path.join(self.tempdir, filename)
-
- def get_username(self):
- return get_username()
-
- def get_groupname(self):
- return get_groupname()
-
- def _missing_test_for_plat(self, do_raise=False):
- if do_raise:
- raise NotImplementedError(
- "This test is not implemented "
- "for the running platform: %s" %
- _system)
-
- def touch(self, filepath):
- folder, filename = os.path.split(filepath)
- if not os.path.isdir(folder):
- mkdir_p(folder)
- # XXX should move to test_basetest
- self.assertTrue(os.path.isdir(folder))
-
- with open(filepath, 'w') as fp:
- fp.write(' ')
-
- # XXX should move to test_basetest
- self.assertTrue(os.path.isfile(filepath))
-
- def chmod600(self, filepath):
- check_and_fix_urw_only(filepath)
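
A subclass of the removed BaseLeapTest had to define its own setUp/tearDown (even if empty) and could then use the sandboxed HOME/PATH helpers; a minimal sketch, assuming the leap packages as they stood before this commit:

    from leap.testing.basetest import BaseLeapTest

    class TempdirTest(BaseLeapTest):
        def setUp(self):
            pass    # mandatory override: the base class raises NotImplementedError

        def tearDown(self):
            pass

        def test_touch_and_chmod(self):
            path = self.get_tempfile("foo/bar.txt")
            self.touch(path)        # creates parent dirs inside the sandbox
            self.chmod600(path)     # clamps the file down to u+rw only
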
diff --git a/src/leap/testing/cacert.pem b/src/leap/testing/cacert.pem
deleted file mode 100644
index 6989c480..00000000
--- a/src/leap/testing/cacert.pem
+++ /dev/null
@@ -1,23 +0,0 @@
------BEGIN CERTIFICATE-----
-MIID1TCCAr2gAwIBAgIJAOv0BS09D8byMA0GCSqGSIb3DQEBBQUAMIGAMQswCQYD
-VQQGEwJVUzETMBEGA1UECAwKY3liZXJzcGFjZTEnMCUGA1UECgweTEVBUCBFbmNy
-eXB0aW9uIEFjY2VzcyBQcm9qZWN0MRYwFAYDVQQDDA10ZXN0cy1sZWFwLnNlMRsw
-GQYJKoZIhvcNAQkBFgxpbmZvQGxlYXAuc2UwHhcNMTIwODMxMTYyNjMwWhcNMTUw
-ODMxMTYyNjMwWjCBgDELMAkGA1UEBhMCVVMxEzARBgNVBAgMCmN5YmVyc3BhY2Ux
-JzAlBgNVBAoMHkxFQVAgRW5jcnlwdGlvbiBBY2Nlc3MgUHJvamVjdDEWMBQGA1UE
-AwwNdGVzdHMtbGVhcC5zZTEbMBkGCSqGSIb3DQEJARYMaW5mb0BsZWFwLnNlMIIB
-IjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA1pU7OU+abrUXFZwp6X0LlF0f
-xQvC1Nmr5sFH7N9RTu3bdwY2t57ECP2TPkH6+x7oOvCTgAMxIE1scWEEkfgKViqW
-FH/Om1UW1PMaiDYGtFuqEuxM95FvaYxp2K6rzA37WNsedA28sCYzhRD+/5HqbCNT
-3rRS2cPaVO8kXI/5bgd8bUk3009pWTg4SvTtOW/9MWJbBH5f5JWmMn7Ayt6hIdT/
-E6npofEK/UCqAlEscARYFXSB/F8nK1whjo9mGFjMUd7d/25UbFHqOk4K7ishD4DH
-F7LaS84rS+Sjwn3YtDdDQblGghJfz8X1AfPSGivGnvLVdkmMF9Y2hJlSQ7+C5wID
-AQABo1AwTjAdBgNVHQ4EFgQUnpJEv4FnlqKbfm7mprudKdrnOAowHwYDVR0jBBgw
-FoAUnpJEv4FnlqKbfm7mprudKdrnOAowDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0B
-AQUFAAOCAQEAGW66qwdK/ATRVZkTpI2sgi+2dWD5tY4VyZuJIrRwfXsGPeVvmdsa
-zDmwW5dMkth1Of5yO6o7ijvUvfnw/UCLNLNICKZhH5G0DHstfBeFc0jnP2MqOZCp
-puRGPBlO2nxUCvoGcPRUKGQK9XSYmxcmaSFyzKVDMLnmH+Lakj5vaY9a8ZAcZTz7
-T5qePxKAxg+RIlH8Ftc485QP3fhqPYPrRsL3g6peiqCvIRshoP1MSoh19boI+1uX
-wHQ/NyDkL5ErKC5JCSpaeF8VG1ek570kKWQLuQAbnlXZw+Sqfu35CIdizHaYGEcx
-xA8oXH4L2JaT2x9GKDSpCmB2xXy/NVamUg==
------END CERTIFICATE-----
diff --git a/src/leap/testing/https_server.py b/src/leap/testing/https_server.py
deleted file mode 100644
index 21191c32..00000000
--- a/src/leap/testing/https_server.py
+++ /dev/null
@@ -1,68 +0,0 @@
-from BaseHTTPServer import HTTPServer
-import os
-import ssl
-import SocketServer
-import threading
-import unittest
-
-_where = os.path.split(__file__)[0]
-
-
-def where(filename):
- return os.path.join(_where, filename)
-
-
-class HTTPSServer(HTTPServer):
- def server_bind(self):
- SocketServer.TCPServer.server_bind(self)
- self.socket = ssl.wrap_socket(
- self.socket, server_side=True,
- certfile=where("leaptestscert.pem"),
- keyfile=where("leaptestskey.pem"),
- ca_certs=where("cacert.pem"),
- ssl_version=ssl.PROTOCOL_SSLv23)
-
-
-class TestServerThread(threading.Thread):
- def __init__(self, test_object, request_handler):
- threading.Thread.__init__(self)
- self.request_handler = request_handler
- self.test_object = test_object
-
- def run(self):
- self.server = HTTPSServer(('localhost', 0), self.request_handler)
- host, port = self.server.socket.getsockname()
- self.test_object.HOST, self.test_object.PORT = host, port
- self.test_object.server_started.set()
- self.test_object = None
- try:
- self.server.serve_forever(0.05)
- finally:
- self.server.server_close()
-
- def stop(self):
- self.server.shutdown()
-
-
-class BaseHTTPSServerTestCase(unittest.TestCase):
- """
- derived classes need to implement a request_handler
- """
- def setUp(self):
- self.server_started = threading.Event()
- self.thread = TestServerThread(self, self.request_handler)
- self.thread.start()
- self.server_started.wait()
-
- def tearDown(self):
- self.thread.stop()
-
- def get_server(self):
- host, port = self.HOST, self.PORT
- if host == "127.0.0.1":
- host = "localhost"
- return "%s:%s" % (host, port)
-
-
-if __name__ == "__main__":
- unittest.main()
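
A derived test case only needed to provide a request_handler class; something along these lines (a Python 2 sketch, using the bundled test certificates):

    from BaseHTTPServer import BaseHTTPRequestHandler
    from leap.testing.https_server import BaseHTTPSServerTestCase

    class _Handler(BaseHTTPRequestHandler):
        def do_GET(self):
            self.send_response(200)
            self.end_headers()
            self.wfile.write("ok")

    class SmokeTest(BaseHTTPSServerTestCase):
        request_handler = _Handler

        def test_server_is_up(self):
            # get_server() returns "localhost:<random port>"
            self.assertTrue(self.get_server().startswith("localhost:"))
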
diff --git a/src/leap/testing/leaptestscert.pem b/src/leap/testing/leaptestscert.pem
deleted file mode 100644
index 65596b1a..00000000
--- a/src/leap/testing/leaptestscert.pem
+++ /dev/null
@@ -1,84 +0,0 @@
-Certificate:
- Data:
- Version: 3 (0x2)
- Serial Number:
- eb:f4:05:2d:3d:0f:c6:f3
- Signature Algorithm: sha1WithRSAEncryption
- Issuer: C=US, ST=cyberspace, O=LEAP Encryption Access Project, CN=tests-leap.se/emailAddress=info@leap.se
- Validity
- Not Before: Aug 31 16:30:17 2012 GMT
- Not After : Aug 31 16:30:17 2013 GMT
- Subject: C=US, ST=cyberspace, L=net, O=LEAP Encryption Access Project, CN=localhost/emailAddress=info@leap.se
- Subject Public Key Info:
- Public Key Algorithm: rsaEncryption
- Public-Key: (2048 bit)
- Modulus:
- 00:bc:f1:c4:05:ce:4b:d5:9b:9a:fa:c1:a5:0c:89:
- 15:7e:05:69:b6:a4:62:38:3a:d6:14:4a:36:aa:3c:
- 31:70:54:2e:bf:7d:05:19:ad:7b:0c:a9:a6:7d:46:
- be:83:62:cb:ea:b9:48:6c:7d:78:a0:10:0b:ad:8a:
- 74:7a:b8:ff:32:85:64:36:90:dc:38:dd:90:6e:07:
- 82:70:ae:5f:4e:1f:f4:46:98:f3:98:b4:fa:08:65:
- bf:d6:ec:a9:ba:7e:a8:f0:40:a2:d0:1a:cb:e6:fc:
- 95:c5:54:63:92:5b:b8:0a:36:cc:26:d3:2b:ad:16:
- ff:49:53:f4:65:7c:64:27:9a:f5:12:75:11:a5:0c:
- 5a:ea:1e:e4:31:f3:a6:2b:db:0e:4a:5d:aa:47:3a:
- f0:5e:2a:d5:6f:74:b6:f8:bc:9a:73:d0:fa:8a:be:
- a8:69:47:9b:07:45:d9:b5:cd:1c:9b:c5:41:9a:65:
- cc:99:a0:bd:bf:b5:e8:9f:66:5f:69:c9:6d:c8:68:
- 50:68:74:ae:8e:12:7e:9c:24:4f:dc:05:61:b7:8a:
- 6d:2a:95:43:d9:3f:fe:d8:c9:a7:ae:63:cd:30:d5:
- 95:84:18:2d:12:b5:2d:a6:fe:37:dd:74:b8:f8:a5:
- 59:18:8f:ca:f7:ae:63:0d:9d:66:51:7d:9c:40:48:
- 9b:a1
- Exponent: 65537 (0x10001)
- X509v3 extensions:
- X509v3 Basic Constraints:
- CA:FALSE
- Netscape Comment:
- OpenSSL Generated Certificate
- X509v3 Subject Key Identifier:
- B2:50:B4:C6:38:8F:BA:C4:3B:69:4C:6B:45:7C:CF:08:48:36:02:E0
- X509v3 Authority Key Identifier:
- keyid:9E:92:44:BF:81:67:96:A2:9B:7E:6E:E6:A6:BB:9D:29:DA:E7:38:0A
-
- Signature Algorithm: sha1WithRSAEncryption
- aa:ab:d4:27:e3:cb:42:05:55:fd:24:b3:e5:55:7d:fb:ce:6c:
- ff:c7:96:f0:7d:30:a1:53:4a:04:eb:a4:24:5e:96:ee:65:ef:
- e5:aa:08:47:9d:aa:95:2a:bb:6a:28:9f:51:62:63:d9:7d:1a:
- 81:a0:72:f7:9f:33:6b:3b:f4:dc:85:cd:2a:ee:83:a9:93:3d:
- 75:53:91:fa:0b:1b:10:83:11:2c:03:4e:ac:bf:c3:e6:25:74:
- 9f:14:13:4a:43:66:c2:d7:1c:6c:94:3e:a6:f3:a5:bd:01:2c:
- 9f:20:29:2e:62:82:12:d8:8b:70:1b:88:2b:18:68:5a:45:80:
- 46:2a:6a:d5:df:1f:d3:e8:57:39:0a:be:1a:d8:b0:3e:e5:b6:
- c3:69:b7:5e:c0:7b:b3:a8:a6:78:ee:0a:3d:a0:74:40:fb:42:
- 9f:f4:98:7f:47:cc:15:28:eb:b1:95:77:82:a8:65:9b:46:c3:
- 4f:f9:f4:72:be:bd:24:28:5c:0d:b3:89:e4:13:71:c8:a7:54:
- 1b:26:15:f3:c1:b2:a9:13:77:54:c2:b9:b0:c7:24:39:00:4c:
- 1a:a7:9b:e7:ad:4a:3a:32:c2:81:0d:13:2d:27:ea:98:00:a9:
- 0e:9e:38:3b:8f:80:34:17:17:3d:49:7e:f4:a5:19:05:28:08:
- 7d:de:d3:1f
------BEGIN CERTIFICATE-----
-MIIECjCCAvKgAwIBAgIJAOv0BS09D8bzMA0GCSqGSIb3DQEBBQUAMIGAMQswCQYD
-VQQGEwJVUzETMBEGA1UECAwKY3liZXJzcGFjZTEnMCUGA1UECgweTEVBUCBFbmNy
-eXB0aW9uIEFjY2VzcyBQcm9qZWN0MRYwFAYDVQQDDA10ZXN0cy1sZWFwLnNlMRsw
-GQYJKoZIhvcNAQkBFgxpbmZvQGxlYXAuc2UwHhcNMTIwODMxMTYzMDE3WhcNMTMw
-ODMxMTYzMDE3WjCBijELMAkGA1UEBhMCVVMxEzARBgNVBAgMCmN5YmVyc3BhY2Ux
-DDAKBgNVBAcMA25ldDEnMCUGA1UECgweTEVBUCBFbmNyeXB0aW9uIEFjY2VzcyBQ
-cm9qZWN0MRIwEAYDVQQDDAlsb2NhbGhvc3QxGzAZBgkqhkiG9w0BCQEWDGluZm9A
-bGVhcC5zZTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALzxxAXOS9Wb
-mvrBpQyJFX4FabakYjg61hRKNqo8MXBULr99BRmtewyppn1GvoNiy+q5SGx9eKAQ
-C62KdHq4/zKFZDaQ3DjdkG4HgnCuX04f9EaY85i0+ghlv9bsqbp+qPBAotAay+b8
-lcVUY5JbuAo2zCbTK60W/0lT9GV8ZCea9RJ1EaUMWuoe5DHzpivbDkpdqkc68F4q
-1W90tvi8mnPQ+oq+qGlHmwdF2bXNHJvFQZplzJmgvb+16J9mX2nJbchoUGh0ro4S
-fpwkT9wFYbeKbSqVQ9k//tjJp65jzTDVlYQYLRK1Lab+N910uPilWRiPyveuYw2d
-ZlF9nEBIm6ECAwEAAaN7MHkwCQYDVR0TBAIwADAsBglghkgBhvhCAQ0EHxYdT3Bl
-blNTTCBHZW5lcmF0ZWQgQ2VydGlmaWNhdGUwHQYDVR0OBBYEFLJQtMY4j7rEO2lM
-a0V8zwhINgLgMB8GA1UdIwQYMBaAFJ6SRL+BZ5aim35u5qa7nSna5zgKMA0GCSqG
-SIb3DQEBBQUAA4IBAQCqq9Qn48tCBVX9JLPlVX37zmz/x5bwfTChU0oE66QkXpbu
-Ze/lqghHnaqVKrtqKJ9RYmPZfRqBoHL3nzNrO/Tchc0q7oOpkz11U5H6CxsQgxEs
-A06sv8PmJXSfFBNKQ2bC1xxslD6m86W9ASyfICkuYoIS2ItwG4grGGhaRYBGKmrV
-3x/T6Fc5Cr4a2LA+5bbDabdewHuzqKZ47go9oHRA+0Kf9Jh/R8wVKOuxlXeCqGWb
-RsNP+fRyvr0kKFwNs4nkE3HIp1QbJhXzwbKpE3dUwrmwxyQ5AEwap5vnrUo6MsKB
-DRMtJ+qYAKkOnjg7j4A0Fxc9SX70pRkFKAh93tMf
------END CERTIFICATE-----
diff --git a/src/leap/testing/leaptestskey.pem b/src/leap/testing/leaptestskey.pem
deleted file mode 100644
index fe6291a1..00000000
--- a/src/leap/testing/leaptestskey.pem
+++ /dev/null
@@ -1,27 +0,0 @@
------BEGIN RSA PRIVATE KEY-----
-MIIEpQIBAAKCAQEAvPHEBc5L1Zua+sGlDIkVfgVptqRiODrWFEo2qjwxcFQuv30F
-Ga17DKmmfUa+g2LL6rlIbH14oBALrYp0erj/MoVkNpDcON2QbgeCcK5fTh/0Rpjz
-mLT6CGW/1uypun6o8ECi0BrL5vyVxVRjklu4CjbMJtMrrRb/SVP0ZXxkJ5r1EnUR
-pQxa6h7kMfOmK9sOSl2qRzrwXirVb3S2+Lyac9D6ir6oaUebB0XZtc0cm8VBmmXM
-maC9v7Xon2ZfacltyGhQaHSujhJ+nCRP3AVht4ptKpVD2T/+2MmnrmPNMNWVhBgt
-ErUtpv433XS4+KVZGI/K965jDZ1mUX2cQEiboQIDAQABAoIBAQCh/+yhSbrtoCgm
-PegEsnix/3QfPBxWt+Obq/HozglZlWQrnMbFuF+bgM4V9ZUdU5UhYNF+66mEG53X
-orGyE3IDYCmHO3cGbroKDPhDIs7mTjGEYlniIbGLh6oPXgU8uKKis9ik84TGPOUx
-NuTUtT07zLYHx+FX3DLwLUKLzTaWWSRgA7nxNwCY8aPqDxCkXEyZHvSlm9KYZnhe
-nVevycoHR+chxL6X/ebbBt2FKR7tl4328mlDXvMXr0vahPH94CuXEvfTj+f6ZxZF
-OctdikyRfd8O3ebrUw0XjafPYyTsDMH0/rQovEBVlecEHqh6Z9dBFlogRq5DSun9
-jem4bBXRAoGBAPGPi4g21pTQPqTFxpqea8TsPqIfo3csfMDPdzT246MxzALHqCfG
-yZi4g2JYJrReSWHulZDORO5skSKNEb5VTA/3xFhKLt8CULZOakKBDLkzRXlnDFXg
-Jsu9vtjDWjQcJsdsRx1tc5V6s+hmel70aaUu/maUlEYZnyIXaTe+1SB1AoGBAMg9
-EMEO5YN52pOI5qPH8j7uyVKtZWKRiR6jb5KA5TxWqZalSdPV6YwDqV/e+HjWrZNw
-kSEFONY0seKpIHwXchx91aym7rDHUgOoBQfCWufRMYvRXLhfOTBu4X+U52++i8wt
-FvKgh6eSmc7VayAaDfHp7yfrIfS03IiN0T35mGj9AoGAPCoXg7a83VW8tId5/trE
-VsjMlM6yhSU0cUV7GFsBuYzWlj6qODX/0iTqvFzeTwBI4LZu1CE78/Jgd62RJMnT
-5wo8Ag1//RVziuSe/K9tvtbxT9qFrQHmR8qbtRt65Q257uOeFstDBZEJLDIR+oJ/
-qZ+5x0zsXUVWaERSdYr3RF0CgYEApKDgN3oB5Ti4Jnh1984aMver+heptYKmU9RX
-lQH4dsVhpQO8UTgcTgtso+/0JZWLHB9+ksFyW1rzrcETfjLglOA4XzzYHeuiWHM5
-v4lhqBpsO+Ij80oHAPUI3RYVud/VnEauCUlGftWfM1hwPPJu6KhHAnDleAWDE5pV
-oDinwBkCgYEAnn/OceaqA2fNYp1IRegbFzpewjUlHLq3bXiCIVhO7W/HqsdfUxjE
-VVdjEno/pAG7ZCO5j8u+rLkG2ZIVY3qsUENUiXz52Q08qEltgM8nfirK7vIQkfd9
-YISRE3QHYJd+ArY4v+7rNeF1O5eIEyzPAbvG5raeZFcZ6POxy66uWKo=
------END RSA PRIVATE KEY-----
diff --git a/src/leap/testing/pyqt.py b/src/leap/testing/pyqt.py
deleted file mode 100644
index 6edaf059..00000000
--- a/src/leap/testing/pyqt.py
+++ /dev/null
@@ -1,52 +0,0 @@
-from PyQt4 import QtCore
-
-_oldConnect = QtCore.QObject.connect
-_oldDisconnect = QtCore.QObject.disconnect
-_oldEmit = QtCore.QObject.emit
-
-
-def _wrapConnect(callableObject):
- """
- Returns a wrapped call to the old version of QtCore.QObject.connect
- """
- @staticmethod
- def call(*args):
- callableObject(*args)
- _oldConnect(*args)
- return call
-
-
-def _wrapDisconnect(callableObject):
- """
- Returns a wrapped call to the old version of QtCore.QObject.disconnect
- """
- @staticmethod
- def call(*args):
- callableObject(*args)
- _oldDisconnect(*args)
- return call
-
-
-def enableSignalDebugging(**kwargs):
- """
- Call this to enable Qt Signal debugging. This will trap all
- connect, and disconnect calls.
- """
-
- f = lambda *args: None
- connectCall = kwargs.get('connectCall', f)
- disconnectCall = kwargs.get('disconnectCall', f)
- emitCall = kwargs.get('emitCall', f)
-
- def printIt(msg):
- def call(*args):
- print msg, args
- return call
- QtCore.QObject.connect = _wrapConnect(connectCall)
- QtCore.QObject.disconnect = _wrapDisconnect(disconnectCall)
-
- def new_emit(self, *args):
- emitCall(self, *args)
- _oldEmit(self, *args)
-
- QtCore.QObject.emit = new_emit
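
The hooks above were meant to be installed once, early, before any signal connections are made; roughly like this sketch (assuming PyQt4 and old-style connect/emit calls, which are the only ones these wrappers intercept):

    from leap.testing import pyqt

    def log_connect(*args):
        print "connect:", args

    def log_emit(*args):
        print "emit:", args

    # monkey-patches QtCore.QObject.connect/disconnect/emit
    pyqt.enableSignalDebugging(connectCall=log_connect, emitCall=log_emit)
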
diff --git a/src/leap/testing/qunittest.py b/src/leap/testing/qunittest.py
deleted file mode 100644
index b89ccec3..00000000
--- a/src/leap/testing/qunittest.py
+++ /dev/null
@@ -1,302 +0,0 @@
-# -*- coding: utf-8 -*-
-
-# **qunittest** is a standard Python `unittest` enhancement for PyQt4,
-# allowing
-# you to test asynchronous code using the standard synchronous testing facilities.
-#
-# The source for `qunittest` is available on [GitHub][gh], and released under
-# the MIT license.
-#
-# Slightly modified by The Leap Project.
-
-### Prerequisites
-
-# Import unittest2 or unittest
-try:
- import unittest2 as unittest
-except ImportError:
- import unittest
-
-# ... and some standard Python libraries
-import sys
-import functools
-import contextlib
-import re
-
-# ... and several PyQt classes
-from PyQt4.QtCore import QTimer
-from PyQt4.QtTest import QTest
-from PyQt4 import QtGui
-
-### The code
-
-
-# Override standard main method, by invoking it inside PyQt event loop
-
-def main(*args, **kwargs):
- qapplication = QtGui.QApplication(sys.argv)
-
- QTimer.singleShot(0, unittest.main(*args, **kwargs))
- qapplication.exec_()
-
-"""
-This main substitute does not integrate with unittest.
-
-Note about mixing the event loop and unittests:
-
-Unittest will fail if we keep more than one reference to a QApplication.
-(PyQt expects there to be one and only one).
-So, for the things that need a QApplication to exist, do something like:
-
- self.app = QApplication()
- QtGui.qApp = self.app
-
-in the class setUp, and::
-
- QtGui.qApp = None
- self.app = None
-
-in the class tearDown.
-
-For some explanation about this, see
- http://stuvel.eu/blog/127/multiple-instances-of-qapplication-in-one-process
-and
- http://www.riverbankcomputing.com/pipermail/pyqt/2010-September/027705.html
-"""
-
-
-# Helper returning the name of a given signal
-
-def _signal_name(signal):
- s = repr(signal)
- name_re = "signal (\w+) of (\w+)"
- match = re.search(name_re, s, re.I)
- if not match:
- return "??"
- return "%s#%s" % (match.group(2), match.group(1))
-
-
-class _SignalConnector(object):
- """ Encapsulates signal assertion testing """
- def __init__(self, test, signal, callable_):
- self.test = test
- self.callable_ = callable_
- self.called_with = None
- self.emited = False
- self.signal = signal
- self._asserted = False
-
- signal.connect(self.on_signal_emited)
-
- # Store given parameters and mark signal as `emited`
- def on_signal_emited(self, *args, **kwargs):
- self.called_with = (args, kwargs)
- self.emited = True
-
- def assertEmission(self):
-        # Assert only once whether the signal was emitted or not
- was_asserted = self._asserted
- self._asserted = True
-
- if not was_asserted:
- if not self.emited:
- self.test.fail(
- "signal %s not emited" % (_signal_name(self.signal)))
-
-            # Call the given callable if necessary
- if self.callable_:
- args, kwargs = self.called_with
- self.callable_(*args, **kwargs)
-
- def __enter__(self):
- # Assert emission when context is entered
- self.assertEmission()
- return self.called_with
-
- def __exit__(self, *_):
- return False
-
-### Unit Testing
-
-# `qunittest` does not force much about how tests should look - it just adds
-# several helpers for asynchronous code testing.
-#
-# Common test case may look like this:
-#
-# import qunittest
-# from calculator import Calculator
-#
-# class TestCalculator(qunittest.TestCase):
-# def setUp(self):
-# self.calc = Calculator()
-#
-# def test_should_add_two_numbers_synchronously(self):
-# # given
-# a, b = 2, 3
-#
-# # when
-# r = self.calc.add(a, b)
-#
-# # then
-# self.assertEqual(5, r)
-#
-# def test_should_calculate_factorial_in_background(self):
-# # given
-#
-# # when
-# self.calc.factorial(20)
-#
-# # then
-#         with self.assertEmited(self.calc.done) as (args, kwargs):
-# self.assertEqual([2432902008176640000], args)
-#
-# if __name__ == "__main__":
-# main()
-#
-# Test can be run by typing:
-#
-# python test_calculator.py
-#
-# Automatic test discovery is not supported now, because testing PyQt needs
-# an instance of `QApplication` and its `exec_` method is blocking.
-#
-
-
-### TestCase class
-
-class TestCase(unittest.TestCase):
- """
- Extends standard `unittest.TestCase` with several PyQt4 testing features
- useful for asynchronous testing.
- """
- def __init__(self, *args, **kwargs):
- super(TestCase, self).__init__(*args, **kwargs)
-
- self._clearSignalConnectors()
- self._succeeded = False
- self.addCleanup(self._clearSignalConnectors)
- self.tearDown = self._decorateTearDown(self.tearDown)
-
- ### Protected methods
-
- def _clearSignalConnectors(self):
- self._connectedSignals = []
-
- def _decorateTearDown(self, tearDown):
- @functools.wraps(tearDown)
- def decorator():
- self._ensureEmitedSignals()
- return tearDown()
- return decorator
-
- def _ensureEmitedSignals(self):
- """
-        Checks whether the signals were actually emitted. Raises AssertionError if not.
- """
- # TODO: add information about line
- for signal in self._connectedSignals:
- signal.assertEmission()
-
- ### Assertions
-
- def assertEmited(self, signal, callable_=None, timeout=1):
- """
-        Asserts that the given `signal` was emitted. Waits 1 second by default
-        before asserting the signal emission.
-
- If `callable_` is given, it should be a function which takes two
-        arguments: `args` and `kwargs`. It will be called after the blocking
-        operation, or when the assertion about the signal emission is made
-        and the signal was emitted.
-
-        When timeout is not `False`, the method call is blocking, and ends
-        after `timeout` seconds. After that time, it validates whether the
-        signal was emitted.
-
-        When timeout is `False`, the method is non-blocking, and the test
-        should wait for the signals afterwards; otherwise, all signal
-        emissions are checked at the end of the test.
-
-        The function returns a context manager, which yields the parameters
-        given to the signal. It can be useful for testing those parameters.
-        The following code:
-
- with self.assertEmited(widget.signal) as (args, kwargs):
- self.assertEqual(1, len(args))
- self.assertEqual("Hello World!", args[0])
-
-        will wait 1 second and test for the correct parameters, if the signal
-        was emitted.
-
- Note that code:
-
- with self.assertEmited(widget.signal, timeout=False) as (a, k):
- # Will not be invoked
-
-        will always fail, since the signal cannot be emitted at the time of
-        its connection - code inside the context will not be invoked at all.
- """
-
- connector = _SignalConnector(self, signal, callable_)
- self._connectedSignals.append(connector)
- if timeout:
- self.waitFor(timeout)
- connector.assertEmission()
-
- return connector
-
- ### Helper methods
-
- @contextlib.contextmanager
- def invokeAfter(self, seconds, callable_=None):
- """
-        Waits the given amount of time and executes the context.
-
-        If `callable_` is given, executes it instead of the context.
- """
- self.waitFor(seconds)
- if callable_:
- callable_()
- else:
- yield
-
- def waitFor(self, seconds):
- """
- Waits given amount of time.
-
- self.widget.loadImage(url)
- self.waitFor(seconds=10)
- """
- QTest.qWait(seconds * 1000)
-
- def succeed(self, bool_=True):
- """ Marks test as suceeded for next `failAfter()` invocation. """
- self._succeeded = self._succeeded or bool_
-
- def failAfter(self, seconds, message=None):
- """
-        Waits the given amount of time, and fails the test if `succeed(bool)`
-        is not called - in the most common case, `succeed(bool)` should be
-        called asynchronously (in a signal handler):
-
- self.widget.signal.connect(lambda: self.succeed())
- self.failAfter(1, "signal not emited?")
-
-        After invocation, the test is no longer considered as succeeded.
- """
- self.waitFor(seconds)
- if not self._succeeded:
- self.fail(message)
-
- self._succeeded = False
-
-### Credits
-#
-# * **Who is responsible:** [Dawid Fatyga][df]
-# * **Source:** [GitHub][gh]
-# * **Doc. generator:** [rocco][ro]
-#
-# [gh]: https://www.github.com/dejw/qunittest
-# [df]: https://github.com/dejw
-# [ro]: http://rtomayko.github.com/rocco/
-#
diff --git a/src/leap/testing/test_basetest.py b/src/leap/testing/test_basetest.py
deleted file mode 100644
index 14d8f8a3..00000000
--- a/src/leap/testing/test_basetest.py
+++ /dev/null
@@ -1,91 +0,0 @@
-"""becase it's oh so meta"""
-try:
- import unittest2 as unittest
-except ImportError:
- import unittest
-
-import os
-import StringIO
-
-from leap.testing.basetest import BaseLeapTest
-
-# global for tempdir checking
-_tempdir = None
-
-
-class _TestCaseRunner(object):
- def run_testcase(self, testcase=None):
- if not testcase:
- return None
- loader = unittest.TestLoader()
- suite = loader.loadTestsFromTestCase(testcase)
-
- # Create runner, and run testcase
- io = StringIO.StringIO()
- runner = unittest.TextTestRunner(stream=io)
- results = runner.run(suite)
- return results
-
-
-class TestAbstractBaseLeapTest(unittest.TestCase, _TestCaseRunner):
-
- def test_abstract_base_class(self):
- class _BaseTest(BaseLeapTest):
- def test_dummy_method(self):
- pass
-
- def test_tautology(self):
- assert True
-
- results = self.run_testcase(_BaseTest)
-
-        # should be 2 errors: NotImplementedError
- # raised for setUp/tearDown
- self.assertEquals(results.testsRun, 2)
- self.assertEquals(len(results.failures), 0)
- self.assertEquals(len(results.errors), 2)
-
-
-class TestInitBaseLeapTest(BaseLeapTest):
-
- def setUp(self):
- pass
-
- def tearDown(self):
- pass
-
- def test_path_is_changed(self):
- os_path = os.environ['PATH']
- self.assertTrue(os_path.startswith(self.tempdir))
-
- def test_old_path_is_saved(self):
- self.assertTrue(len(self.old_path) > 1)
-
-
-class TestCleanedBaseLeapTest(unittest.TestCase, _TestCaseRunner):
-
- def test_tempdir_is_cleaned_after_tests(self):
- class _BaseTest(BaseLeapTest):
- def setUp(self):
- global _tempdir
- _tempdir = self.tempdir
-
- def tearDown(self):
- pass
-
- def test_tempdir_created(self):
- self.assertTrue(os.path.isdir(self.tempdir))
-
- def test_tempdir_created_on_setupclass(self):
- self.assertEqual(_tempdir, self.tempdir)
-
- results = self.run_testcase(_BaseTest)
- self.assertEquals(results.testsRun, 2)
- self.assertEquals(len(results.failures), 0)
- self.assertEquals(len(results.errors), 0)
-
-        # did we clean the tempdir?
- self.assertFalse(os.path.isdir(_tempdir))
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/src/leap/util/__init__.py b/src/leap/util/__init__.py
deleted file mode 100644
index a70a9a8b..00000000
--- a/src/leap/util/__init__.py
+++ /dev/null
@@ -1,9 +0,0 @@
-import logging
-logger = logging.getLogger(__name__)
-
-try:
- import pygeoip
- HAS_GEOIP = True
-except ImportError:
- logger.debug('PyGeoIP not found. Disabled Geo support.')
- HAS_GEOIP = False
diff --git a/src/leap/util/certs.py b/src/leap/util/certs.py
deleted file mode 100644
index f0f790e9..00000000
--- a/src/leap/util/certs.py
+++ /dev/null
@@ -1,18 +0,0 @@
-import os
-import logging
-
-logger = logging.getLogger(__name__)
-
-
-def get_mac_cabundle():
- # hackaround bundle error
- # XXX this needs a better fix!
- f = os.path.split(__file__)[0]
- sep = os.path.sep
- f_ = sep.join(f.split(sep)[:-2])
- verify = os.path.join(f_, 'cacert.pem')
- #logger.error('VERIFY PATH = %s' % verify)
- exists = os.path.isfile(verify)
- #logger.error('do exist? %s', exists)
- if exists:
- return verify
diff --git a/src/leap/util/coroutines.py b/src/leap/util/coroutines.py
deleted file mode 100644
index 0657fc04..00000000
--- a/src/leap/util/coroutines.py
+++ /dev/null
@@ -1,109 +0,0 @@
-# the problem of watching a stdout pipe from
-# openvpn binary: using subprocess and coroutines
-# acting as event consumers
-
-from __future__ import division, print_function
-
-import logging
-from subprocess import PIPE, Popen
-import sys
-from threading import Thread
-
-logger = logging.getLogger(__name__)
-
-ON_POSIX = 'posix' in sys.builtin_module_names
-
-
-#
-# Coroutines goodies
-#
-
-def coroutine(func):
- def start(*args, **kwargs):
- cr = func(*args, **kwargs)
- cr.next()
- return cr
- return start
-
-
-@coroutine
-def process_events(callback):
- """
- coroutine loop that receives
-    the events sent and dispatches the callback.
- :param callback: callback to be called\
-for each event
- :type callback: callable
- """
- try:
- while True:
- m = (yield)
- if callable(callback):
- callback(m)
- else:
- logger.debug('not a callable passed')
- except GeneratorExit:
- return
-
-#
-# Threads
-#
-
-
-def launch_thread(target, args):
- """
-    launch and daemonize a thread.
- :param target: target function that will run in thread
- :type target: function
- :param args: args to be passed to thread
- :type args: list
- """
- t = Thread(target=target,
- args=args)
- t.daemon = True
- t.start()
- return t
-
-
-def watch_output(out, observers):
- """
- initializes dict of observer coroutines
- and pushes lines to each of them as they are received
- from the watched output.
- :param out: stdout of a process.
- :type out: fd
- :param observers: tuple of coroutines to send data\
-for each event
- :type observers: tuple
- """
- observer_dict = dict(((observer, process_events(observer))
- for observer in observers))
- for line in iter(out.readline, b''):
- for obs in observer_dict:
- observer_dict[obs].send(line)
- out.close()
-
-
-def spawn_and_watch_process(command, args, observers=None):
- """
- spawns a subprocess with command, args, and launch
- a watcher thread.
- :param command: command to be executed in the subprocess
- :type command: str
- :param args: arguments
- :type args: list
- :param observers: tuple of observer functions to be called \
-for each line in the subprocess output.
- :type observers: tuple
-    :return: a tuple containing the child process instance and the watcher thread
- :rtype: (Subprocess, Thread)
- """
- subp = Popen([command] + args,
- stdout=PIPE,
- stderr=PIPE,
- bufsize=1,
- close_fds=ON_POSIX)
- watcher = launch_thread(
- watch_output,
- (subp.stdout, observers))
- return subp, watcher
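
In practice the removed module was used to tail a child process' stdout and fan each line out to observer callbacks; a Python 2 sketch, with /bin/echo standing in for the openvpn binary:

    from leap.util.coroutines import spawn_and_watch_process

    def log_line(line):
        print "got:", line.rstrip()

    subp, watcher = spawn_and_watch_process(
        "/bin/echo", ["hello", "world"],
        observers=(log_line,))
    subp.wait()         # let the short-lived child finish
    watcher.join(1)     # the watcher thread is a daemon; give it a moment
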
diff --git a/src/leap/util/dicts.py b/src/leap/util/dicts.py
deleted file mode 100644
index 001ca96b..00000000
--- a/src/leap/util/dicts.py
+++ /dev/null
@@ -1,268 +0,0 @@
-# Backport of OrderedDict() class that runs
-# on Python 2.4, 2.5, 2.6, 2.7 and pypy.
-# Passes Python2.7's test suite and incorporates all the latest updates.
-
-try:
- from thread import get_ident as _get_ident
-except ImportError:
- from dummy_thread import get_ident as _get_ident
-
-try:
- from _abcoll import KeysView, ValuesView, ItemsView
-except ImportError:
- pass
-
-
-class OrderedDict(dict):
- 'Dictionary that remembers insertion order'
- # An inherited dict maps keys to values.
- # The inherited dict provides __getitem__, __len__, __contains__, and get.
- # The remaining methods are order-aware.
- # Big-O running times for all methods are the same as for regular
- # dictionaries.
-
- # The internal self.__map dictionary maps keys to links in a doubly
- # linked list.
- # The circular doubly linked list starts and ends with a sentinel element.
- # The sentinel element never gets deleted (this simplifies the algorithm).
- # Each link is stored as a list of length three: [PREV, NEXT, KEY].
-
- def __init__(self, *args, **kwds):
- '''Initialize an ordered dictionary. Signature is the same as for
- regular dictionaries, but keyword arguments are not recommended
- because their insertion order is arbitrary.
-
- '''
- if len(args) > 1:
- raise TypeError('expected at most 1 arguments, got %d' % len(args))
- try:
- self.__root
- except AttributeError:
- self.__root = root = [] # sentinel node
- root[:] = [root, root, None]
- self.__map = {}
- self.__update(*args, **kwds)
-
- def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
- 'od.__setitem__(i, y) <==> od[i]=y'
- # Setting a new item creates a new link which goes at the end
- # of the linked list, and the inherited dictionary is updated
- # with the new key/value pair.
- if key not in self:
- root = self.__root
- last = root[0]
- last[1] = root[0] = self.__map[key] = [last, root, key]
- dict_setitem(self, key, value)
-
- def __delitem__(self, key, dict_delitem=dict.__delitem__):
- 'od.__delitem__(y) <==> del od[y]'
- # Deleting an existing item uses self.__map to find the link which is
- # then removed by updating the links in the predecessor and successor
- # nodes.
- dict_delitem(self, key)
- link_prev, link_next, key = self.__map.pop(key)
- link_prev[1] = link_next
- link_next[0] = link_prev
-
- def __iter__(self):
- 'od.__iter__() <==> iter(od)'
- root = self.__root
- curr = root[1]
- while curr is not root:
- yield curr[2]
- curr = curr[1]
-
- def __reversed__(self):
- 'od.__reversed__() <==> reversed(od)'
- root = self.__root
- curr = root[0]
- while curr is not root:
- yield curr[2]
- curr = curr[0]
-
- def clear(self):
- 'od.clear() -> None. Remove all items from od.'
- try:
- for node in self.__map.itervalues():
- del node[:]
- root = self.__root
- root[:] = [root, root, None]
- self.__map.clear()
- except AttributeError:
- pass
- dict.clear(self)
-
- def popitem(self, last=True):
- '''od.popitem() -> (k, v), return and remove a (key, value) pair.
- Pairs are returned in LIFO order if last is true or FIFO order if
- false.
- '''
- if not self:
- raise KeyError('dictionary is empty')
- root = self.__root
- if last:
- link = root[0]
- link_prev = link[0]
- link_prev[1] = root
- root[0] = link_prev
- else:
- link = root[1]
- link_next = link[1]
- root[1] = link_next
- link_next[0] = root
- key = link[2]
- del self.__map[key]
- value = dict.pop(self, key)
- return key, value
-
- # -- the following methods do not depend on the internal structure --
-
- def keys(self):
- 'od.keys() -> list of keys in od'
- return list(self)
-
- def values(self):
- 'od.values() -> list of values in od'
- return [self[key] for key in self]
-
- def items(self):
- 'od.items() -> list of (key, value) pairs in od'
- return [(key, self[key]) for key in self]
-
- def iterkeys(self):
- 'od.iterkeys() -> an iterator over the keys in od'
- return iter(self)
-
- def itervalues(self):
- 'od.itervalues -> an iterator over the values in od'
- for k in self:
- yield self[k]
-
- def iteritems(self):
- 'od.iteritems -> an iterator over the (key, value) items in od'
- for k in self:
- yield (k, self[k])
-
- def update(*args, **kwds):
- '''od.update(E, **F) -> None. Update od from dict/iterable E and F.
-
- If E is a dict instance, does: for k in E: od[k] = E[k]
- If E has a .keys() method, does: for k in E.keys():
- od[k] = E[k]
- Or if E is an iterable of items, does: for k, v in E: od[k] = v
- In either case, this is followed by: for k, v in F.items():
- od[k] = v
- '''
-
- if len(args) > 2:
- raise TypeError('update() takes at most 2 positional '
- 'arguments (%d given)' % (len(args),))
- elif not args:
- raise TypeError('update() takes at least 1 argument (0 given)')
- self = args[0]
- # Make progressively weaker assumptions about "other"
- other = ()
- if len(args) == 2:
- other = args[1]
- if isinstance(other, dict):
- for key in other:
- self[key] = other[key]
- elif hasattr(other, 'keys'):
- for key in other.keys():
- self[key] = other[key]
- else:
- for key, value in other:
- self[key] = value
- for key, value in kwds.items():
- self[key] = value
-
- __update = update # let subclasses override update
- # without breaking __init__
-
- __marker = object()
-
- def pop(self, key, default=__marker):
- '''od.pop(k[,d]) -> v
- remove specified key and return the corresponding value.
- If key is not found, d is returned if given,
- otherwise KeyError is raised.
-
- '''
- if key in self:
- result = self[key]
- del self[key]
- return result
- if default is self.__marker:
- raise KeyError(key)
- return default
-
- def setdefault(self, key, default=None):
- 'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
- if key in self:
- return self[key]
- self[key] = default
- return default
-
- def __repr__(self, _repr_running={}):
- 'od.__repr__() <==> repr(od)'
- call_key = id(self), _get_ident()
- if call_key in _repr_running:
- return '...'
- _repr_running[call_key] = 1
- try:
- if not self:
- return '%s()' % (self.__class__.__name__,)
- return '%s(%r)' % (self.__class__.__name__, self.items())
- finally:
- del _repr_running[call_key]
-
- def __reduce__(self):
- 'Return state information for pickling'
- items = [[k, self[k]] for k in self]
- inst_dict = vars(self).copy()
- for k in vars(OrderedDict()):
- inst_dict.pop(k, None)
- if inst_dict:
- return (self.__class__, (items,), inst_dict)
- return self.__class__, (items,)
-
- def copy(self):
- 'od.copy() -> a shallow copy of od'
- return self.__class__(self)
-
- @classmethod
- def fromkeys(cls, iterable, value=None):
- '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
- and values equal to v (which defaults to None).
-
- '''
- d = cls()
- for key in iterable:
- d[key] = value
- return d
-
- def __eq__(self, other):
- '''od.__eq__(y) <==> od==y.
- Comparison to another OD is order-sensitive
- while comparison to a regular mapping is order-insensitive.
- '''
- if isinstance(other, OrderedDict):
- return len(self) == len(other) and self.items() == other.items()
- return dict.__eq__(self, other)
-
- def __ne__(self, other):
- return not self == other
-
- # -- the following methods are only used in Python 2.7 --
-
- def viewkeys(self):
- "od.viewkeys() -> a set-like object providing a view on od's keys"
- return KeysView(self)
-
- def viewvalues(self):
- "od.viewvalues() -> an object providing a view on od's values"
- return ValuesView(self)
-
- def viewitems(self):
- "od.viewitems() -> a set-like object providing a view on od's items"
- return ItemsView(self)
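
Behaviour matched the stdlib collections.OrderedDict that this file backports, e.g.:

    from leap.util.dicts import OrderedDict

    od = OrderedDict([('a', 1), ('b', 2)])
    od['c'] = 3
    print od.keys()                 # ['a', 'b', 'c'] -- insertion order preserved
    print od.popitem()              # ('c', 3) -- LIFO by default
    print od.popitem(last=False)    # ('a', 1) -- FIFO when last=False
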
diff --git a/src/leap/util/fileutil.py b/src/leap/util/fileutil.py
deleted file mode 100644
index 820ffe46..00000000
--- a/src/leap/util/fileutil.py
+++ /dev/null
@@ -1,120 +0,0 @@
-import errno
-from itertools import chain
-import logging
-import os
-import platform
-import stat
-
-
-logger = logging.getLogger()
-
-
-def is_user_executable(fpath):
- st = os.stat(fpath)
- return bool(st.st_mode & stat.S_IXUSR)
-
-
-def extend_path():
- ourplatform = platform.system()
- if ourplatform == "Linux":
- return "/usr/local/sbin:/usr/sbin"
- # XXX add mac / win extended search paths?
-
-
-def which(program, path=None):
- """
- an implementation of which
- that extends the path with
- other locations, like sbin
-    (for instance, the openvpn binary is likely to be there)
- @param program: a string representing the binary we're looking for.
- """
- def is_exe(fpath):
- """
- check that path exists,
- it's a file,
- and is executable by the owner
- """
- # we would check for access,
- # but it's likely that we're
- # using uid 0 + polkitd
-
- return os.path.isfile(fpath)\
- and is_user_executable(fpath)
-
- def ext_candidates(fpath):
- yield fpath
- for ext in os.environ.get("PATHEXT", "").split(os.pathsep):
- yield fpath + ext
-
- def iter_path(pathset):
- """
- returns iterator with
- full path for a given path list
- and the current target bin.
- """
- for path in pathset.split(os.pathsep):
- exe_file = os.path.join(path, program)
- #print 'file=%s' % exe_file
- for candidate in ext_candidates(exe_file):
- if is_exe(candidate):
- yield candidate
-
- fpath, fname = os.path.split(program)
- if fpath:
- if is_exe(program):
- return program
- else:
- # extended iterator
- # with extra path
- if path is None:
- path = os.environ['PATH']
- extended_path = chain(
- iter_path(path),
- iter_path(extend_path()))
- for candidate in extended_path:
- if candidate is not None:
- return candidate
-
- # sorry bro.
- return None
-
-
-def mkdir_p(path):
- """
- implements mkdir -p functionality
- """
- try:
- os.makedirs(path)
- except OSError as exc:
- if exc.errno == errno.EEXIST:
- pass
- else:
- raise
-
-
-def mkdir_f(path):
- folder, fname = os.path.split(path)
- mkdir_p(folder)
-
-
-def check_and_fix_urw_only(_file):
- """
- test for 600 mode and try
- to set it if anything different found
- """
- mode = stat.S_IMODE(
- os.stat(_file).st_mode)
-
- if mode != int('600', 8):
- try:
- logger.warning(
- 'bad permission on %s '
- 'attempting to set 600',
- _file)
- os.chmod(_file, stat.S_IRUSR | stat.S_IWUSR)
- except OSError:
- logger.error(
- 'error while trying to chmod 600 %s',
- _file)
- raise
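
Typical use of the removed helpers, as a sketch on a Linux box (the paths are hypothetical):

    from leap.util import fileutil

    vpn_bin = fileutil.which("openvpn")     # also searches /usr/local/sbin:/usr/sbin
    if vpn_bin is None:
        print "openvpn not found"

    cfg = "/tmp/leap-demo/deep/nested/demo.conf"
    fileutil.mkdir_f(cfg)                   # mkdir -p on the parent folder
    open(cfg, 'w').close()
    fileutil.check_and_fix_urw_only(cfg)    # warn and chmod to 600 if looser
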
diff --git a/src/leap/util/geo.py b/src/leap/util/geo.py
deleted file mode 100644
index 54b29596..00000000
--- a/src/leap/util/geo.py
+++ /dev/null
@@ -1,32 +0,0 @@
-"""
-experimental geo support.
-not yet a feature.
-in debian, we rely on the (optional) geoip-database
-"""
-import os
-import platform
-
-from leap.util import HAS_GEOIP
-
-GEOIP = None
-
-if HAS_GEOIP:
- import pygeoip # we know we can :)
-
- GEOIP_PATH = None
-
- if platform.system() == "Linux":
- PATH = "/usr/share/GeoIP/GeoIP.dat"
- if os.path.isfile(PATH):
- GEOIP_PATH = PATH
- GEOIP = pygeoip.GeoIP(GEOIP_PATH, pygeoip.MEMORY_CACHE)
-
-
-def get_country_name(ip):
- if not GEOIP:
- return
- try:
- country = GEOIP.country_name_by_addr(ip)
- except pygeoip.GeoIPError:
- country = None
- return country if country else "-"
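
The lookup degrades gracefully when pygeoip or the Debian geoip-database is missing; e.g.:

    from leap.util.geo import get_country_name

    # a country name, "-" when unknown, or None when geoip support is unavailable
    print get_country_name("8.8.8.8")
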
diff --git a/src/leap/util/leap_argparse.py b/src/leap/util/leap_argparse.py
deleted file mode 100644
index 3412a72c..00000000
--- a/src/leap/util/leap_argparse.py
+++ /dev/null
@@ -1,44 +0,0 @@
-import argparse
-
-
-def build_parser():
- """
- all the options for the leap arg parser
- Some of these could be switched on only if debug flag is present!
- """
- epilog = "Copyright 2012 The LEAP Encryption Access Project"
- parser = argparse.ArgumentParser(description="""
-Launches the LEAP Client""", epilog=epilog)
- parser.add_argument('-d', '--debug', action="store_true",
- help=("Launches client in debug mode, writing debug"
- "info to stdout"))
- parser.add_argument('-l', '--logfile', metavar="LOG FILE", nargs='?',
- action="store", dest="log_file",
- #type=argparse.FileType('w'),
- help='optional log file')
- parser.add_argument('--openvpn-verbosity', nargs='?',
- type=int,
- action="store", dest="openvpn_verb",
- help='verbosity level for openvpn logs [1-6]')
-
- # Not in use, we might want to reintroduce them.
- #parser.add_argument('-i', '--no-provider-checks',
- #action="store_true", default=False,
- #help="skips download of provider config files. gets "
- #"config from local files only. Will fail if cannot "
- #"find any")
- #parser.add_argument('-k', '--no-ca-verify',
- #action="store_true", default=False,
- #help="(insecure). Skips verification of the server "
- #"certificate used in TLS handshake.")
- #parser.add_argument('-c', '--config', metavar="CONFIG FILE", nargs='?',
- #action="store", dest="config_file",
- #type=argparse.FileType('r'),
- #help='optional config file')
- return parser
-
-
-def init_leapc_args():
- parser = build_parser()
- opts, unknown = parser.parse_known_args()
- return parser, opts
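
Callers used it roughly like this (a sketch):

    from leap.util import leap_argparse

    parser, opts = leap_argparse.init_leapc_args()
    if opts.debug:
        print "debug mode, logging to", opts.log_file or "stdout"
    if opts.openvpn_verb is not None:
        print "openvpn verbosity:", opts.openvpn_verb
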
diff --git a/src/leap/util/misc.py b/src/leap/util/misc.py
deleted file mode 100644
index d869a1ba..00000000
--- a/src/leap/util/misc.py
+++ /dev/null
@@ -1,37 +0,0 @@
-"""
-misc utils
-"""
-import psutil
-
-from leap.base.constants import OPENVPN_BIN
-
-
-class ImproperlyConfigured(Exception):
- """
- """
-
-
-def null_check(value, value_name):
- try:
- assert value is not None
- except AssertionError:
- raise ImproperlyConfigured(
- "%s parameter cannot be None" % value_name)
-
-
-def get_openvpn_pids():
- # binary name might change
-
- openvpn_pids = []
- for p in psutil.process_iter():
- try:
- # XXX Not exact!
- # Will give false positives.
- # we should check that cmdline BEGINS
- # with openvpn or with our wrapper
- # (pkexec / osascript / whatever)
- if OPENVPN_BIN in ' '.join(p.cmdline):
- openvpn_pids.append(p.pid)
- except psutil.error.AccessDenied:
- pass
- return openvpn_pids
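
A sketch of how these helpers were consumed (assuming psutil and the rest of the removed leap.base tree are importable):

    from leap.util.misc import ImproperlyConfigured, null_check, get_openvpn_pids

    try:
        null_check(None, "provider")    # complains: the value must not be None
    except ImproperlyConfigured as exc:
        print exc

    print get_openvpn_pids()            # [] when no openvpn process is running
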
diff --git a/src/leap/util/tests/__init__.py b/src/leap/util/tests/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/src/leap/util/tests/__init__.py
+++ /dev/null
diff --git a/src/leap/util/tests/test_fileutil.py b/src/leap/util/tests/test_fileutil.py
deleted file mode 100644
index f5131b3d..00000000
--- a/src/leap/util/tests/test_fileutil.py
+++ /dev/null
@@ -1,100 +0,0 @@
-import os
-import platform
-import shutil
-import stat
-import tempfile
-import unittest
-
-from leap.util import fileutil
-
-
-class FileUtilTest(unittest.TestCase):
- """
- test our file utils
- """
-
- def setUp(self):
- self.system = platform.system()
- self.create_temp_dir()
-
- def tearDown(self):
- self.remove_temp_dir()
-
- #
- # helpers
- #
-
- def create_temp_dir(self):
- self.tmpdir = tempfile.mkdtemp()
-
- def remove_temp_dir(self):
- shutil.rmtree(self.tmpdir)
-
- def get_file_path(self, filename):
- return os.path.join(
- self.tmpdir,
- filename)
-
- def touch_exec_file(self):
- fp = self.get_file_path('testexec')
- open(fp, 'w').close()
- os.chmod(
- fp,
- stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR)
- return fp
-
- def get_mode(self, fp):
- return stat.S_IMODE(os.stat(fp).st_mode)
-
- #
- # tests
- #
-
- def test_is_user_executable(self):
- """
- touch_exec_file creates in mode 700?
- """
- # XXX could check access X_OK
-
- fp = self.touch_exec_file()
- mode = self.get_mode(fp)
- self.assertEqual(mode, int('700', 8))
-
- def test_which(self):
- """
- which implementation ok?
- not a very reliable test,
-        but I cannot think of anything smarter now.
-        I guess it's highly improbable that cp lives anywhere but /bin/cp.
- """
- # XXX yep, we can change the syspath
- # for the test... !
-
- if self.system == "Linux":
- self.assertEqual(
- fileutil.which('cp'),
- '/bin/cp')
-
- def test_mkdir_p(self):
- """
- our own mkdir -p implementation ok?
- """
- testdir = self.get_file_path(
- os.path.join('test', 'foo', 'bar'))
- self.assertEqual(os.path.isdir(testdir), False)
- fileutil.mkdir_p(testdir)
- self.assertEqual(os.path.isdir(testdir), True)
-
- def test_check_and_fix_urw_only(self):
- """
-        ensure check_and_fix_urw_only ok?
- """
- fp = self.touch_exec_file()
- mode = self.get_mode(fp)
- self.assertEqual(mode, int('700', 8))
- fileutil.check_and_fix_urw_only(fp)
- mode = self.get_mode(fp)
- self.assertEqual(mode, int('600', 8))
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/src/leap/util/tests/test_leap_argparse.py b/src/leap/util/tests/test_leap_argparse.py
deleted file mode 100644
index 4e2b811f..00000000
--- a/src/leap/util/tests/test_leap_argparse.py
+++ /dev/null
@@ -1,35 +0,0 @@
-from argparse import Namespace
-import unittest
-
-from leap.util import leap_argparse
-
-
-class LeapArgParseTest(unittest.TestCase):
- """
- Test argparse options for eip client
- """
-
- def setUp(self):
- """
- get the parser
- """
- self.parser = leap_argparse.build_parser()
-
- def test_debug_mode(self):
- """
- test debug mode option
- """
- opts = self.parser.parse_args(
- ['--debug'])
- self.assertEqual(
- opts,
- Namespace(
- debug=True,
- log_file=None,
- #config_file=None,
- #no_provider_checks=False,
- #no_ca_verify=False,
- openvpn_verb=None))
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/src/leap/util/tests/test_translations.py b/src/leap/util/tests/test_translations.py
deleted file mode 100644
index 794daeba..00000000
--- a/src/leap/util/tests/test_translations.py
+++ /dev/null
@@ -1,22 +0,0 @@
-import unittest
-
-from leap.util import translations
-
-
-class TrasnlationsTestCase(unittest.TestCase):
- """
- tests for translation functions and classes
- """
-
- def setUp(self):
- self.trClass = translations.LEAPTranslatable
-
- def test_trasnlatable(self):
- tr = self.trClass({"en": "house", "es": "casa"})
- eq = self.assertEqual
- eq(tr.tr(to="es"), "casa")
- eq(tr.tr(to="en"), "house")
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/src/leap/util/translations.py b/src/leap/util/translations.py
deleted file mode 100644
index f55c8fba..00000000
--- a/src/leap/util/translations.py
+++ /dev/null
@@ -1,82 +0,0 @@
-import inspect
-import logging
-
-from PyQt4.QtCore import QCoreApplication
-from PyQt4.QtCore import QLocale
-
-logger = logging.getLogger(__name__)
-
-"""
-here I could not do all that I wanted.
-the context is not getting passed to the xml file.
-Looks like pylupdate4 is somehow a hack that does not
-parse the python ast too well.
-I guess we could generate the xml ourselves as a last resort.
-"""
-
-# XXX BIG NOTE:
-# RESIST the temptation to make the translate function
-# more compact, or to pass the Context argument as a variable.
-# Its name HAS to be explicit due to how the pylupdate parser
-# works.
-
-
-qtTranslate = QCoreApplication.translate
-
-
-def translate(*args, **kwargs):
- """
- our magic function.
- translate(Context, text, comment)
- """
- if len(args) == 1:
- obj = args[0]
- if isinstance(obj, LEAPTranslatable) and hasattr(obj, 'tr'):
- return obj.tr()
-
- klsname = None
- try:
- # get class value from instance
- # using live object inspection
- prev_frame = inspect.stack()[1][0]
- locals_ = inspect.getargvalues(prev_frame).locals
- self = locals_.get('self')
- if self:
-
- # Trying to get the class name
- # but this is useless, the parser
- # has already got the context.
- klsname = self.__class__.__name__
- #print 'KLSNAME -- ', klsname
- except:
- logger.error('error getting stack frame')
-
- if klsname and len(args) == 1:
- nargs = (klsname,) + args
- return qtTranslate(*nargs)
-
- else:
- return qtTranslate(*args)
-
-
-class LEAPTranslatable(dict):
- """
- An extended dict that implements a .tr method
- so it can be translated on the fly by our
- magic translate method
- """
-
- try:
- locale = str(QLocale.system().name()).split('_')[0]
- except:
- logger.warning("could not get system locale!")
- print "could not get system locale!"
- locale = "en"
-
- def tr(self, to=None):
- if not to:
- to = self.locale
- _tr = self.get(to, None)
- if not _tr:
- _tr = self.get("en", None)
- return _tr
diff --git a/src/leap/util/web.py b/src/leap/util/web.py
deleted file mode 100644
index 15de0561..00000000
--- a/src/leap/util/web.py
+++ /dev/null
@@ -1,40 +0,0 @@
-"""
-web related utilities
-"""
-
-
-class UsageError(Exception):
- """ """
-
-
-def get_https_domain_and_port(full_domain):
- """
- returns a tuple with domain and port
- from a full_domain string that can
- contain a colon
- """
- full_domain = unicode(full_domain)
- if full_domain is None:
- return None, None
-
- https_sch = "https://"
- http_sch = "http://"
-
- if full_domain.startswith(https_sch):
- full_domain = full_domain.lstrip(https_sch)
- elif full_domain.startswith(http_sch):
- raise UsageError(
- "cannot be called with a domain "
- "that begins with 'http://'")
-
- domain_split = full_domain.split(':')
- _len = len(domain_split)
- if _len == 1:
- domain, port = full_domain, 443
- elif _len == 2:
- domain, port = domain_split
- else:
- raise UsageError(
- "must be called with one only parameter"
- "in the form domain[:port]")
- return domain, port
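
The helper split a provider address into a (domain, port) pair, defaulting to port 443; for example:

    from leap.util.web import get_https_domain_and_port

    # no port given: default to 443
    print get_https_domain_and_port("example.org")               # example.org, 443
    # an https:// prefix is stripped and the explicit port is kept
    print get_https_domain_and_port("https://example.org:8443")  # example.org, 8443
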
diff --git a/tests/README b/tests/README
deleted file mode 100644
index 8745f03c..00000000
--- a/tests/README
+++ /dev/null
@@ -1 +0,0 @@
-Write general and integration tests here.
diff --git a/tests/__init__.py b/tests/__init__.py
deleted file mode 100644
index e69de29b..00000000
--- a/tests/__init__.py
+++ /dev/null
diff --git a/tests/test_qt_environment.py b/tests/test_qt_environment.py
deleted file mode 100644
index e90d527f..00000000
--- a/tests/test_qt_environment.py
+++ /dev/null
@@ -1,43 +0,0 @@
-import sys
-import unittest
-
-import sip
-#sip.setapi('QVariant', 2)
-
-from PyQt4 import QtGui
-
-
-class TestWin(QtGui.QMainWindow):
- """
- a _really_ minimal test window,
- with only one tray icon
- """
- def __init__(self):
- super(TestWin, self).__init__()
- self.trayIcon = QtGui.QSystemTrayIcon(self)
-
-
-class QtEnvironTest(unittest.TestCase):
- """
- Test we're running a proper qt environment
- """
-
- def setUp(self):
- self.app = QtGui.QApplication(sys.argv)
- self.win = TestWin()
-
- def tearDown(self):
- del(self.win)
- del(self.app)
-
- def test_system_has_systray(self):
- """
- does system have systray available?
- """
- self.assertEqual(
- self.win.trayIcon.isSystemTrayAvailable(),
- True)
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/tox.ini b/tox.ini
deleted file mode 100644
index e7e1015f..00000000
--- a/tox.ini
+++ /dev/null
@@ -1,12 +0,0 @@
-[tox]
-envlist = py26
-
-[testenv]
-deps = -r{toxinidir}/pkg/requirements.pip
- -r{toxinidir}/pkg/test-requirements.pip
-sitepackages = True
-commands = xvfb-run nosetests leap --exclude-dir=src/leap/soledad --exclude-dir=src/leap/email/
-
-[testenv:pep8]
-deps = pep8==1.1
-commands = pep8 --repeat --show-source src/leap setup.py --ignore=E202,W602 --exclude=*_rc.py --repeat
diff --git a/versioneer.py b/versioneer.py
deleted file mode 100644
index 57d99419..00000000
--- a/versioneer.py
+++ /dev/null
@@ -1,656 +0,0 @@
-#! /usr/bin/python
-
-"""versioneer.py
-
-(like a rocketeer, but for versions)
-
-* https://github.com/warner/python-versioneer
-* Brian Warner
-* License: Public Domain
-* Version: 0.7+
-
-This file helps distutils-based projects manage their version number by just
-creating version-control tags.
-
-For developers who work from a VCS-generated tree (e.g. 'git clone' etc),
-each 'setup.py version', 'setup.py build', 'setup.py sdist' will compute a
-version number by asking your version-control tool about the current
-checkout. The version number will be written into a generated _version.py
-file of your choosing, where it can be included by your __init__.py
-
-For users who work from a VCS-generated tarball (e.g. 'git archive'), it will
-compute a version number by looking at the name of the directory created when
-the tarball is unpacked. This conventionally includes both the name of the
-project and a version number.
-
-For users who work from a tarball built by 'setup.py sdist', it will get a
-version number from a previously-generated _version.py file.
-
-As a result, loading code directly from the source tree will not result in a
-real version. If you want real versions from VCS trees (where you frequently
-update from the upstream repository, or do new development), you will need to
-do a 'setup.py version' after each update, and load code from the build/
-directory.
-
-You need to provide this code with a few configuration values:
-
- versionfile_source:
- A project-relative pathname into which the generated version strings
- should be written. This is usually a _version.py next to your project's
- main __init__.py file. If your project uses src/myproject/__init__.py,
- this should be 'src/myproject/_version.py'. This file should be checked
- in to your VCS as usual: the copy created below by 'setup.py
- update_files' will include code that parses expanded VCS keywords in
- generated tarballs. The 'build' and 'sdist' commands will replace it with
- a copy that has just the calculated version string.
-
- versionfile_build:
- Like versionfile_source, but relative to the build directory instead of
- the source directory. These will differ when your setup.py uses
- 'package_dir='. If you have package_dir={'myproject': 'src/myproject'},
- then you will probably have versionfile_build='myproject/_version.py' and
- versionfile_source='src/myproject/_version.py'.
-
- tag_prefix: a string, like 'PROJECTNAME-', which appears at the start of all
- VCS tags. If your tags look like 'myproject-1.2.0', then you
- should use tag_prefix='myproject-'. If you use unprefixed tags
- like '1.2.0', this should be an empty string.
-
- parentdir_prefix: a string, frequently the same as tag_prefix, which
- appears at the start of all unpacked tarball filenames. If
- your tarball unpacks into 'myproject-1.2.0', this should
- be 'myproject-'.
-
-To use it:
-
- 1: include this file in the top level of your project
- 2: make the following changes to the top of your setup.py:
- import versioneer
- versioneer.versionfile_source = 'src/myproject/_version.py'
- versioneer.versionfile_build = 'myproject/_version.py'
- versioneer.tag_prefix = '' # tags are like 1.2.0
- versioneer.parentdir_prefix = 'myproject-' # dirname like 'myproject-1.2.0'
- 3: add the following arguments to the setup() call in your setup.py:
- version=versioneer.get_version(),
- cmdclass=versioneer.get_cmdclass(),
- 4: run 'setup.py update_files', which will create _version.py, and will
- append the following to your __init__.py:
- from _version import __version__
- 5: modify your MANIFEST.in to include versioneer.py
- 6: add both versioneer.py and the generated _version.py to your VCS
-"""
-
-import os, sys, re
-from distutils.core import Command
-from distutils.command.sdist import sdist as _sdist
-from distutils.command.build import build as _build
-
-versionfile_source = None
-versionfile_build = None
-tag_prefix = None
-parentdir_prefix = None
-
-VCS = "git"
-IN_LONG_VERSION_PY = False
-
-
-LONG_VERSION_PY = '''
-IN_LONG_VERSION_PY = True
-# This file helps to compute a version number in source trees obtained from
-# git-archive tarball (such as those provided by githubs download-from-tag
-# feature). Distribution tarballs (built by setup.py sdist) and build
-# directories (produced by setup.py build) will contain a much shorter file
-# that just contains the computed version number.
-
-# This file is released into the public domain. Generated by
-# versioneer-0.7+ (https://github.com/warner/python-versioneer)
-
-# these strings will be replaced by git during git-archive
-git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s"
-git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s"
-
-
-import subprocess
-import sys
-
-def run_command(args, cwd=None, verbose=False):
- try:
- # remember shell=False, so use git.cmd on windows, not just git
- p = subprocess.Popen(args, stdout=subprocess.PIPE, cwd=cwd)
- except EnvironmentError:
- e = sys.exc_info()[1]
- if verbose:
- print("unable to run %%s" %% args[0])
- print(e)
- return None
- stdout = p.communicate()[0].strip()
- if sys.version >= '3':
- stdout = stdout.decode()
- if p.returncode != 0:
- if verbose:
- print("unable to run %%s (error)" %% args[0])
- return None
- return stdout
-
-
-import sys
-import re
-import os.path
-
-def get_expanded_variables(versionfile_source):
- # the code embedded in _version.py can just fetch the value of these
- # variables. When used from setup.py, we don't want to import
- # _version.py, so we do it with a regexp instead. This function is not
- # used from _version.py.
- variables = {}
- try:
- for line in open(versionfile_source,"r").readlines():
- if line.strip().startswith("git_refnames ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- variables["refnames"] = mo.group(1)
- if line.strip().startswith("git_full ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- variables["full"] = mo.group(1)
- except EnvironmentError:
- pass
- return variables
-
-def versions_from_expanded_variables(variables, tag_prefix, verbose=False):
- refnames = variables["refnames"].strip()
- if refnames.startswith("$Format"):
- if verbose:
- print("variables are unexpanded, not using")
- return {} # unexpanded, so not in an unpacked git-archive tarball
- refs = set([r.strip() for r in refnames.strip("()").split(",")])
- for ref in list(refs):
- if not re.search(r'\d', ref):
- if verbose:
- print("discarding '%%s', no digits" %% ref)
- refs.discard(ref)
- # Assume all version tags have a digit. git's %%d expansion
- # behaves like git log --decorate=short and strips out the
- # refs/heads/ and refs/tags/ prefixes that would let us
- # distinguish between branches and tags. By ignoring refnames
- # without digits, we filter out many common branch names like
- # "release" and "stabilization", as well as "HEAD" and "master".
- if verbose:
- print("remaining refs: %%s" %% ",".join(sorted(refs)))
- for ref in sorted(refs):
- # sorting will prefer e.g. "2.0" over "2.0rc1"
- if ref.startswith(tag_prefix):
- r = ref[len(tag_prefix):]
- if verbose:
- print("picking %%s" %% r)
- return { "version": r,
- "full": variables["full"].strip() }
- # no suitable tags, so we use the full revision id
- if verbose:
- print("no suitable tags, using full revision id")
- return { "version": variables["full"].strip(),
- "full": variables["full"].strip() }
-
-def versions_from_vcs(tag_prefix, versionfile_source, verbose=False):
- # this runs 'git' from the root of the source tree. That either means
- # someone ran a setup.py command (and this code is in versioneer.py, so
- # IN_LONG_VERSION_PY=False, thus the containing directory is the root of
- # the source tree), or someone ran a project-specific entry point (and
- # this code is in _version.py, so IN_LONG_VERSION_PY=True, thus the
- # containing directory is somewhere deeper in the source tree). This only
- # gets called if the git-archive 'subst' variables were *not* expanded,
- # and _version.py hasn't already been rewritten with a short version
- # string, meaning we're inside a checked out source tree.
-
- try:
- here = os.path.abspath(__file__)
- except NameError:
- # some py2exe/bbfreeze/non-CPython implementations don't do __file__
- return {} # not always correct
-
- # versionfile_source is the relative path from the top of the source tree
- # (where the .git directory might live) to this file. Invert this to find
- # the root from __file__.
- root = here
- if IN_LONG_VERSION_PY:
- for i in range(len(versionfile_source.split("/"))):
- root = os.path.dirname(root)
- else:
- root = os.path.dirname(here)
- if not os.path.exists(os.path.join(root, ".git")):
- if verbose:
- print("no .git in %%s" %% root)
- return {}
-
- GIT = "git"
- if sys.platform == "win32":
- GIT = "git.cmd"
- stdout = run_command([GIT, "describe", "--tags", "--dirty", "--always"],
- cwd=root)
- if stdout is None:
- return {}
- if not stdout.startswith(tag_prefix):
- if verbose:
- print("tag '%%s' doesn't start with prefix '%%s'" %% (stdout, tag_prefix))
- return {}
- tag = stdout[len(tag_prefix):]
- stdout = run_command([GIT, "rev-parse", "HEAD"], cwd=root)
- if stdout is None:
- return {}
- full = stdout.strip()
- if tag.endswith("-dirty"):
- full += "-dirty"
- return {"version": tag, "full": full}
-
-
-def versions_from_parentdir(parentdir_prefix, versionfile_source, verbose=False):
- if IN_LONG_VERSION_PY:
- # We're running from _version.py. If it's from a source tree
- # (execute-in-place), we can work upwards to find the root of the
- # tree, and then check the parent directory for a version string. If
- # it's in an installed application, there's no hope.
- try:
- here = os.path.abspath(__file__)
- except NameError:
- # py2exe/bbfreeze/non-CPython don't have __file__
- return {} # without __file__, we have no hope
- # versionfile_source is the relative path from the top of the source
- # tree to _version.py. Invert this to find the root from __file__.
- root = here
- for i in range(len(versionfile_source.split("/"))):
- root = os.path.dirname(root)
- else:
- # we're running from versioneer.py, which means we're running from
- # the setup.py in a source tree. sys.argv[0] is setup.py in the root.
- here = os.path.abspath(sys.argv[0])
- root = os.path.dirname(here)
-
- # Source tarballs conventionally unpack into a directory that includes
- # both the project name and a version string.
- dirname = os.path.basename(root)
- if not dirname.startswith(parentdir_prefix):
- if verbose:
- print("guessing rootdir is '%%s', but '%%s' doesn't start with prefix '%%s'" %%
- (root, dirname, parentdir_prefix))
- return None
- return {"version": dirname[len(parentdir_prefix):], "full": ""}
-
-tag_prefix = "%(TAG_PREFIX)s"
-parentdir_prefix = "%(PARENTDIR_PREFIX)s"
-versionfile_source = "%(VERSIONFILE_SOURCE)s"
-
-def get_versions(default={"version": "unknown", "full": ""}, verbose=False):
- variables = { "refnames": git_refnames, "full": git_full }
- ver = versions_from_expanded_variables(variables, tag_prefix, verbose)
- if not ver:
- ver = versions_from_vcs(tag_prefix, versionfile_source, verbose)
- if not ver:
- ver = versions_from_parentdir(parentdir_prefix, versionfile_source,
- verbose)
- if not ver:
- ver = default
- return ver
-
-'''
-
-
-import subprocess
-import sys
-
-def run_command(args, cwd=None, verbose=False):
- try:
- # remember shell=False, so use git.cmd on windows, not just git
- p = subprocess.Popen(args, stdout=subprocess.PIPE, cwd=cwd)
- except EnvironmentError:
- e = sys.exc_info()[1]
- if verbose:
- print("unable to run %s" % args[0])
- print(e)
- return None
- stdout = p.communicate()[0].strip()
- if sys.version >= '3':
- stdout = stdout.decode()
- if p.returncode != 0:
- if verbose:
- print("unable to run %s (error)" % args[0])
- return None
- return stdout
-
-
-import sys
-import re
-import os.path
-
-def get_expanded_variables(versionfile_source):
- # the code embedded in _version.py can just fetch the value of these
- # variables. When used from setup.py, we don't want to import
- # _version.py, so we do it with a regexp instead. This function is not
- # used from _version.py.
- variables = {}
- try:
- for line in open(versionfile_source,"r").readlines():
- if line.strip().startswith("git_refnames ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- variables["refnames"] = mo.group(1)
- if line.strip().startswith("git_full ="):
- mo = re.search(r'=\s*"(.*)"', line)
- if mo:
- variables["full"] = mo.group(1)
- except EnvironmentError:
- pass
- return variables
-
-def versions_from_expanded_variables(variables, tag_prefix, verbose=False):
- refnames = variables["refnames"].strip()
- if refnames.startswith("$Format"):
- if verbose:
- print("variables are unexpanded, not using")
- return {} # unexpanded, so not in an unpacked git-archive tarball
- refs = set([r.strip() for r in refnames.strip("()").split(",")])
- for ref in list(refs):
- if not re.search(r'\d', ref):
- if verbose:
- print("discarding '%s', no digits" % ref)
- refs.discard(ref)
- # Assume all version tags have a digit. git's %d expansion
- # behaves like git log --decorate=short and strips out the
- # refs/heads/ and refs/tags/ prefixes that would let us
- # distinguish between branches and tags. By ignoring refnames
- # without digits, we filter out many common branch names like
- # "release" and "stabilization", as well as "HEAD" and "master".
- if verbose:
- print("remaining refs: %s" % ",".join(sorted(refs)))
- for ref in sorted(refs):
- # sorting will prefer e.g. "2.0" over "2.0rc1"
- if ref.startswith(tag_prefix):
- r = ref[len(tag_prefix):]
- if verbose:
- print("picking %s" % r)
- return { "version": r,
- "full": variables["full"].strip() }
- # no suitable tags, so we use the full revision id
- if verbose:
- print("no suitable tags, using full revision id")
- return { "version": variables["full"].strip(),
- "full": variables["full"].strip() }
-
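-# For illustration only, with hypothetical values: inside a tarball produced
-# by 'git archive', the export-subst keywords are expanded to something like
-#
-#   git_refnames = " (HEAD, 0.2.0, master)"
-#   git_full     = "0123456789abcdef0123456789abcdef01234567"
-#
-# With tag_prefix == "", versions_from_expanded_variables() above discards
-# the digit-less refnames ("HEAD", "master") and returns
-# {"version": "0.2.0", "full": "0123456789abcdef0123456789abcdef01234567"}.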
-def versions_from_vcs(tag_prefix, versionfile_source, verbose=False):
- # this runs 'git' from the root of the source tree. That either means
- # someone ran a setup.py command (and this code is in versioneer.py, so
- # IN_LONG_VERSION_PY=False, thus the containing directory is the root of
- # the source tree), or someone ran a project-specific entry point (and
- # this code is in _version.py, so IN_LONG_VERSION_PY=True, thus the
- # containing directory is somewhere deeper in the source tree). This only
- # gets called if the git-archive 'subst' variables were *not* expanded,
- # and _version.py hasn't already been rewritten with a short version
- # string, meaning we're inside a checked out source tree.
-
- try:
- here = os.path.abspath(__file__)
- except NameError:
- # some py2exe/bbfreeze/non-CPython implementations don't do __file__
- return {} # not always correct
-
- # versionfile_source is the relative path from the top of the source tree
- # (where the .git directory might live) to this file. Invert this to find
- # the root from __file__.
- root = here
- if IN_LONG_VERSION_PY:
- for i in range(len(versionfile_source.split("/"))):
- root = os.path.dirname(root)
- else:
- root = os.path.dirname(here)
- if not os.path.exists(os.path.join(root, ".git")):
- if verbose:
- print("no .git in %s" % root)
- return {}
-
- GIT = "git"
- if sys.platform == "win32":
- GIT = "git.cmd"
- stdout = run_command([GIT, "describe", "--tags", "--dirty", "--always"],
- cwd=root)
- if stdout is None:
- return {}
- if not stdout.startswith(tag_prefix):
- if verbose:
- print("tag '%s' doesn't start with prefix '%s'" % (stdout, tag_prefix))
- return {}
- tag = stdout[len(tag_prefix):]
- stdout = run_command([GIT, "rev-parse", "HEAD"], cwd=root)
- if stdout is None:
- return {}
- full = stdout.strip()
- if tag.endswith("-dirty"):
- full += "-dirty"
- return {"version": tag, "full": full}
-
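-# For illustration only, with hypothetical output: in a checkout a few
-# commits past a "0.2.0" tag, with local modifications,
-#
-#   $ git describe --tags --dirty --always
-#   0.2.0-3-g1a2b3c4-dirty
-#
-# so with tag_prefix == "" versions_from_vcs() above would return
-# {"version": "0.2.0-3-g1a2b3c4-dirty", "full": "<40-hex HEAD sha>-dirty"}.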
-
-def versions_from_parentdir(parentdir_prefix, versionfile_source, verbose=False):
- if IN_LONG_VERSION_PY:
- # We're running from _version.py. If it's from a source tree
- # (execute-in-place), we can work upwards to find the root of the
- # tree, and then check the parent directory for a version string. If
- # it's in an installed application, there's no hope.
- try:
- here = os.path.abspath(__file__)
- except NameError:
- # py2exe/bbfreeze/non-CPython don't have __file__
- return {} # without __file__, we have no hope
- # versionfile_source is the relative path from the top of the source
- # tree to _version.py. Invert this to find the root from __file__.
- root = here
- for i in range(len(versionfile_source.split("/"))):
- root = os.path.dirname(root)
- else:
- # we're running from versioneer.py, which means we're running from
- # the setup.py in a source tree. sys.argv[0] is setup.py in the root.
- here = os.path.abspath(sys.argv[0])
- root = os.path.dirname(here)
-
- # Source tarballs conventionally unpack into a directory that includes
- # both the project name and a version string.
- dirname = os.path.basename(root)
- if not dirname.startswith(parentdir_prefix):
- if verbose:
- print("guessing rootdir is '%s', but '%s' doesn't start with prefix '%s'" %
- (root, dirname, parentdir_prefix))
- return None
- return {"version": dirname[len(parentdir_prefix):], "full": ""}
-
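-# For illustration only, with hypothetical names: an sdist unpacks into a
-# directory such as "myproject-0.2.0", so with
-# parentdir_prefix == "myproject-" versions_from_parentdir() above returns
-# {"version": "0.2.0", "full": ""}.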
-import sys
-
-def do_vcs_install(versionfile_source, ipy):
- GIT = "git"
- if sys.platform == "win32":
- GIT = "git.cmd"
- run_command([GIT, "add", "versioneer.py"])
- run_command([GIT, "add", versionfile_source])
- run_command([GIT, "add", ipy])
- present = False
- try:
- f = open(".gitattributes", "r")
- for line in f.readlines():
- if line.strip().startswith(versionfile_source):
- if "export-subst" in line.strip().split()[1:]:
- present = True
- f.close()
- except EnvironmentError:
- pass
- if not present:
- f = open(".gitattributes", "a+")
- f.write("%s export-subst\n" % versionfile_source)
- f.close()
- run_command([GIT, "add", ".gitattributes"])
-
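-# For illustration only: if versionfile_source were the hypothetical path
-# "src/myproject/_version.py", do_vcs_install() above would make sure that
-# .gitattributes contains the line
-#
-#   src/myproject/_version.py export-subst
-#
-# which is what tells 'git archive' to expand the $Format$ keywords in that
-# file when building a tarball.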
-
-SHORT_VERSION_PY = """
-# This file was generated by 'versioneer.py' (0.7+) from
-# revision-control system data, or from the parent directory name of an
-# unpacked source archive. Distribution tarballs contain a pre-generated copy
-# of this file.
-
-version_version = '%(version)s'
-version_full = '%(full)s'
-def get_versions(default={}, verbose=False):
- return {'version': version_version, 'full': version_full}
-
-"""
-
-DEFAULT = {"version": "unknown", "full": "unknown"}
-
-def versions_from_file(filename):
- versions = {}
- try:
- f = open(filename)
- except EnvironmentError:
- return versions
- for line in f.readlines():
- mo = re.match("version_version = '([^']+)'", line)
- if mo:
- versions["version"] = mo.group(1)
- mo = re.match("version_full = '([^']+)'", line)
- if mo:
- versions["full"] = mo.group(1)
- return versions
-
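-# For illustration only, with hypothetical values: a generated _version.py
-# (written from SHORT_VERSION_PY above) contains lines such as
-#
-#   version_version = '0.2.0'
-#   version_full = '0123456789abcdef0123456789abcdef01234567'
-#
-# from which versions_from_file() recovers
-# {"version": "0.2.0", "full": "0123456789abcdef0123456789abcdef01234567"}.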
-def write_to_version_file(filename, versions):
- f = open(filename, "w")
- f.write(SHORT_VERSION_PY % versions)
- f.close()
- print("set %s to '%s'" % (filename, versions["version"]))
-
-
-def get_best_versions(versionfile, tag_prefix, parentdir_prefix,
- default=DEFAULT, verbose=False):
- # returns dict with two keys: 'version' and 'full'
- #
- # extract version from first of _version.py, 'git describe', parentdir.
- # This is meant to work for developers using a source checkout, for users
- # of a tarball created by 'setup.py sdist', and for users of a
- # tarball/zipball created by 'git archive' or github's download-from-tag
- # feature.
-
- variables = get_expanded_variables(versionfile_source)
- if variables:
- ver = versions_from_expanded_variables(variables, tag_prefix)
- if ver:
- if verbose: print("got version from expanded variable %s" % ver)
- return ver
-
- ver = versions_from_file(versionfile)
- if ver:
- if verbose: print("got version from file %s %s" % (versionfile, ver))
- return ver
-
- ver = versions_from_vcs(tag_prefix, versionfile_source, verbose)
- if ver:
- if verbose: print("got version from git %s" % ver)
- return ver
-
- ver = versions_from_parentdir(parentdir_prefix, versionfile_source, verbose)
- if ver:
- if verbose: print("got version from parentdir %s" % ver)
- return ver
-
-    if verbose: print("got version from default %s" % default)
- return default
-
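-# For illustration only -- a worked trace of the fallback order above for a
-# plain 'git clone' checkout: the $Format$ keywords in versionfile_source are
-# still unexpanded (step 1 yields nothing), no short _version.py has been
-# generated yet (step 2 yields nothing), so the version comes from
-# 'git describe' (step 3); only an unpacked tarball without .git falls
-# through to the parent directory name, and only after that to the default.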
-def get_versions(default=DEFAULT, verbose=False):
- assert versionfile_source is not None, "please set versioneer.versionfile_source"
- assert tag_prefix is not None, "please set versioneer.tag_prefix"
- assert parentdir_prefix is not None, "please set versioneer.parentdir_prefix"
- return get_best_versions(versionfile_source, tag_prefix, parentdir_prefix,
- default=default, verbose=verbose)
-def get_version(verbose=False):
- return get_versions(verbose=verbose)["version"]
-
-class cmd_version(Command):
- description = "report generated version string"
- user_options = []
- boolean_options = []
- def initialize_options(self):
- pass
- def finalize_options(self):
- pass
- def run(self):
- ver = get_version(verbose=True)
- print("Version is currently: %s" % ver)
-
-
-class cmd_build(_build):
- def run(self):
- versions = get_versions(verbose=True)
- _build.run(self)
- # now locate _version.py in the new build/ directory and replace it
- # with an updated value
- target_versionfile = os.path.join(self.build_lib, versionfile_build)
- print("UPDATING %s" % target_versionfile)
- os.unlink(target_versionfile)
- f = open(target_versionfile, "w")
- f.write(SHORT_VERSION_PY % versions)
- f.close()
-
-class cmd_sdist(_sdist):
- def run(self):
- versions = get_versions(verbose=True)
- self._versioneer_generated_versions = versions
- # unless we update this, the command will keep using the old version
- self.distribution.metadata.version = versions["version"]
- return _sdist.run(self)
-
- def make_release_tree(self, base_dir, files):
- _sdist.make_release_tree(self, base_dir, files)
- # now locate _version.py in the new base_dir directory (remembering
- # that it may be a hardlink) and replace it with an updated value
- target_versionfile = os.path.join(base_dir, versionfile_source)
- print("UPDATING %s" % target_versionfile)
- os.unlink(target_versionfile)
- f = open(target_versionfile, "w")
- f.write(SHORT_VERSION_PY % self._versioneer_generated_versions)
- f.close()
-
-INIT_PY_SNIPPET = """
-from ._version import get_versions
-__version__ = get_versions()['version']
-del get_versions
-"""
-
-class cmd_update_files(Command):
- description = "modify __init__.py and create _version.py"
- user_options = []
- boolean_options = []
- def initialize_options(self):
- pass
- def finalize_options(self):
- pass
- def run(self):
- ipy = os.path.join(os.path.dirname(versionfile_source), "__init__.py")
- print(" creating %s" % versionfile_source)
- f = open(versionfile_source, "w")
- f.write(LONG_VERSION_PY % {"DOLLAR": "$",
- "TAG_PREFIX": tag_prefix,
- "PARENTDIR_PREFIX": parentdir_prefix,
- "VERSIONFILE_SOURCE": versionfile_source,
- })
- f.close()
- try:
- old = open(ipy, "r").read()
- except EnvironmentError:
- old = ""
- if INIT_PY_SNIPPET not in old:
- print(" appending to %s" % ipy)
- f = open(ipy, "a")
- f.write(INIT_PY_SNIPPET)
- f.close()
- else:
- print(" %s unmodified" % ipy)
- do_vcs_install(versionfile_source, ipy)
-
-def get_cmdclass():
- return {'version': cmd_version,
- 'update_files': cmd_update_files,
- 'build': cmd_build,
- 'sdist': cmd_sdist,
- }
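-
-# For illustration only -- with the cmdclass above installed into setup.py
-# (see the commented sketch just below the module docstring), the typical
-# commands are:
-#
-#   $ python setup.py version        # report the computed version string
-#   $ python setup.py update_files   # write _version.py and patch __init__.py
-#   $ python setup.py sdist          # ship a frozen _version.py in the tarball
-#   $ python setup.py build          # rewrite _version.py under build/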