author    drebs <drebs@riseup.net>  2017-09-17 16:02:10 -0300
committer drebs <drebs@riseup.net>  2017-09-17 16:02:10 -0300
commit    14b155b33845f27a414f8c8ad0d6828b7301aae9
tree      0f4bfc91cabd93191544e46e58d4361061efec30
parent    cfff46ff9becdbe5cf48816870e625ed253ecc57
[test] split benchmarks into different ci jobs
 .gitlab-ci.yml | 29
 tox.ini        | 40
 2 files changed, 47 insertions(+), 22 deletions(-)
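
This commit replaces the single benchmark CI job with two jobs, time-cpu and memory, each backed by its own tox environment (benchmark-time-cpu and benchmark-memory), so the time/CPU and memory benchmark runs no longer happen back to back inside one job. A rough local equivalent of what each job runs, assuming a CouchDB instance reachable at couchdb:5984 as in the CI service setup:

    tox --recreate -e benchmark-time-cpu -- --couch-url http://couchdb:5984
    tox --recreate -e benchmark-memory -- --couch-url http://couchdb:5984
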
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index ad0f2abb..d1c2e472 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -57,7 +57,7 @@ tests:
script:
- tox -- --couch-url http://couchdb:5984
-benchmark:
+time-cpu:
stage: benchmark
image: 0xacab.org:4567/leap/soledad:latest
tags:
@@ -78,7 +78,32 @@ benchmark:
# Add $PYTEST_OPTS to pytest.ini to allow posting benchmark tests
# to an elasticsearch instance
- echo "addopts=$PYTEST_OPTS" >> pytest.ini && chmod 600 pytest.ini
- - /usr/bin/unbuffer tox --recreate -e benchmark -- --couch-url http://couchdb:5984 | /usr/bin/ts -s
+ - /usr/bin/unbuffer tox --recreate -e benchmark-time-cpu -- --couch-url http://couchdb:5984 | /usr/bin/ts -s
+ # Output locally saved benchmarks if they exist
+ - 'if [ -d .benchmarks ]; then find .benchmarks -type f -exec cat {} \; ; fi'
+
+memory:
+ stage: benchmark
+ image: 0xacab.org:4567/leap/soledad:latest
+ tags:
+ - benchmark
+ services:
+ - couchdb
+ script:
+ - git checkout -B "$CI_COMMIT_REF_NAME" "$CI_COMMIT_SHA"
+ - '[ -n "${BENCHMARK_ALL_COMMITS}" ] && git checkout origin/master scripts/benchmark/setup-all-commits-env.sh'
+ - '[ -n "${BENCHMARK_ALL_COMMITS}" ] && ./scripts/benchmark/setup-all-commits-env.sh'
+ - curl -s couchdb:5984
+ # You can provide a $NETRC variable containing the creds for your
+ # elasticsearch instance so it's protected from being leaked in the
+ # CI console
+ # We can't get it working inside docker for unknown reasons.
+ # - echo "$NETRC" > /root/.netrc && chmod 600 /root/.netrc
+ #
+ # Add $PYTEST_OPTS to pytest.ini to allow posting benchmark tests
+ # to an elasticsearch instance
+ - echo "addopts=$PYTEST_OPTS" >> pytest.ini && chmod 600 pytest.ini
+ - /usr/bin/unbuffer tox --recreate -e benchmark-memory -- --couch-url http://couchdb:5984 | /usr/bin/ts -s
# Output locally saved benchmarks if they exist
- 'if [ -d .benchmarks ]; then find .benchmarks -type f -exec cat {} \; ; fi'
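
Both jobs finish by dumping any locally saved benchmark results. A minimal sketch of inspecting those results by hand, assuming pytest-benchmark's usual behaviour of saving runs as JSON files under .benchmarks (that layout is an assumption, not something this diff guarantees):

    # print any saved benchmark runs, oldest first (assumes JSON files under .benchmarks)
    find .benchmarks -type f -name '*.json' | sort | xargs -r cat
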
diff --git a/tox.ini b/tox.ini
index d1e03366..3fa4ed54 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,6 +1,6 @@
[tox]
envlist = py27
-skipsdist=True
+skipsdist = True
[testenv]
basepython = python2.7
@@ -73,23 +73,6 @@ setenv =
TERM=xterm
install_command = pip3 install {opts} {packages}
-[testenv:benchmark]
-usedevelop = True
-deps =
- {[testenv]deps}
-commands =
-# run benchmarks twice: once for time and cpu and a second time for memory
- py.test --subdir=benchmarks {posargs}
- py.test --subdir=benchmarks --watch-memory {posargs}
-passenv = HOST_HOSTNAME
-
-[testenv:responsiveness]
-usedevelop = True
-deps =
- {[testenv:benchmark]deps}
-commands =
- py.test --subdir=responsiveness {posargs}
-
[testenv:code-check]
deps =
pep8
@@ -98,10 +81,27 @@ commands =
pep8
flake8
+[testenv:benchmark-time-cpu]
+usedevelop = True
+deps = {[testenv]deps}
+commands = py.test --subdir=benchmarks {posargs}
+passenv = HOST_HOSTNAME
+
+[testenv:benchmark-memory]
+usedevelop = True
+deps = {[testenv]deps}
+commands = py.test --subdir=benchmarks --watch-memory {posargs}
+passenv = HOST_HOSTNAME
+
+[testenv:responsiveness]
+usedevelop = True
+deps = {[testenv]deps}
+commands = py.test --subdir=responsiveness {posargs}
+passenv = HOST_HOSTNAME
+
[testenv:parallel]
deps =
{[testenv]deps}
pytest-xdist
install_command = pip install {opts} {packages}
-commands =
- py.test {posargs} -n 4
+commands = py.test {posargs} -n 4
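
At the pytest level the two new benchmark environments differ only in the --watch-memory flag, which the removed single environment used to add on its second pass. Roughly what tox ends up invoking for each, assuming the CI's --couch-url posargs:

    # benchmark-time-cpu
    py.test --subdir=benchmarks --couch-url http://couchdb:5984

    # benchmark-memory
    py.test --subdir=benchmarks --watch-memory --couch-url http://couchdb:5984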