author     Ruben Pollan <meskio@sindominio.net>  2014-10-30 11:01:14 -0600
committer  Ruben Pollan <meskio@sindominio.net>  2014-10-30 11:01:14 -0600
commit     6e20c53cfba23be039a70a0342abb74d9a4dc0bd (patch)
tree       705d153a631f478ed84fd60584b2458e8f205319
parent     5edb0fb980224f8b60e1627930dc8a3c0a0bbb88 (diff)
parent     1206082bdf0a25a2675296c01cf798e4da835d1b (diff)
Merge branch 'ivan/feature/get-tuf-into-bundler' into develop
-rw-r--r--  README.md              |  12
-rw-r--r--  bitmask-0.7.0rc3.json  |  11
-rw-r--r--  bitmask-nightly.json   |  10
-rw-r--r--  bundler/actions.py     | 177
-rw-r--r--  bundler/main.py        |  67
-rwxr-xr-x  createbundle.sh        |  66
-rw-r--r--  pkg/requirements.pip   |   6
-rwxr-xr-x  tuf-stuff.sh           |  37
-rwxr-xr-x  tuf/init.py            | 102
-rwxr-xr-x  tuf/release.py         | 135
10 files changed, 518 insertions, 105 deletions
diff --git a/README.md b/README.md
index 19e6732..e19f984 100644
--- a/README.md
+++ b/README.md
@@ -35,11 +35,17 @@ shouldn't be a problem.
You need to copy the scripts `createbundle.sh` and `copy-binaries.sh` to a VM
and run `./createbundle.sh`, after that it should be all automagically
processed.
-
-You can start the script with the parameter `nightly` to build a bundle from
-the latest `develop` code, otherwise it will bundle from the latest tag.
+As input, the `createbundle.sh` script needs a `bitmask.json` file in the
+same folder. There is an example file in this repo that you can use as a
+template, and a JSON file named `bitmask-nightly.json` that is meant to be
+used to create a bundle combining all the `develop` branches.
The resulting bundle will be saved in:
`/home/leap/bitmask.bundle/bundle.output/` under some name like
`Bitmask-linux64-2014-09-24-9b3b7f6f.tar.bz2` in case of bundling a *nightly*
release, or `Bitmask-linux64-0.7.0.tar.bz2` in case of a *normal* release.
+
+After the bundling process finishes, it creates a file named
+`reuse-binaries.lock`. If you don't delete that file, the bundler will reuse
+the compiled libraries and binaries (like `openvpn` and `PySide`), saving a
+lot of time the next time the bundler is executed.
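For reference, here is a minimal sketch (not part of this commit) of how a `bitmask.json` could be sanity-checked before running `createbundle.sh`. The `check_versions_file` helper and the required-key list are illustrative and assume the keys used in `bitmask-nightly.json`, with `tuf_repo` treated as optional:

# Illustrative helper, not part of the bundler: verify that a bitmask.json
# contains every key the bundler expects (keys taken from the
# bitmask-nightly.json example; "tuf_repo" is optional).
import json
import sys

REQUIRED_KEYS = [
    "version", "bitmask_client", "soledad", "leap_pycommon",
    "keymanager", "leap_mail", "bitmask_launcher", "leap_assets",
]


def check_versions_file(path):
    with open(path) as f:
        versions = json.load(f)
    missing = [key for key in REQUIRED_KEYS if key not in versions]
    if missing:
        print "missing keys in {0}: {1}".format(path, ", ".join(missing))
        return False
    return True


if __name__ == "__main__":
    sys.exit(0 if check_versions_file(sys.argv[1]) else 1)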
diff --git a/bitmask-0.7.0rc3.json b/bitmask-0.7.0rc3.json
new file mode 100644
index 0000000..70559c8
--- /dev/null
+++ b/bitmask-0.7.0rc3.json
@@ -0,0 +1,11 @@
+{
+ "version": "0.7.0rc3",
+ "tuf_repo": "unstable",
+ "bitmask_client": "0.7.0rc3",
+ "soledad": "0.6.0",
+ "leap_pycommon": "0.3.9",
+ "keymanager": "0.3.8",
+ "leap_mail": "0.3.10",
+ "bitmask_launcher": "0.3.2",
+ "leap_assets": "master"
+}
diff --git a/bitmask-nightly.json b/bitmask-nightly.json
new file mode 100644
index 0000000..563fa5d
--- /dev/null
+++ b/bitmask-nightly.json
@@ -0,0 +1,10 @@
+{
+ "version": "nightly",
+ "bitmask_client": "develop",
+ "soledad": "develop",
+ "leap_pycommon": "develop",
+ "keymanager": "develop",
+ "leap_mail": "develop",
+ "bitmask_launcher": "develop",
+ "leap_assets": "master"
+}
diff --git a/bundler/actions.py b/bundler/actions.py
index 20d99f5..1f688e3 100644
--- a/bundler/actions.py
+++ b/bundler/actions.py
@@ -1,5 +1,6 @@
import datetime
import hashlib
+import json
import os
import stat
import subprocess
@@ -65,14 +66,17 @@ class Action(object):
def run(self, *args, **kwargs):
pass
+ def log(self, msg):
+ print "{0}: {1}".format(self._name.upper(), msg)
+
def skippable(func):
def skip_func(self, *args, **kwargs):
if self.skip:
- print "Skipping...", self.name
+ print "SKIPPING: {0}...".format(self.name)
return
if not self.do:
- print "Skipping...", self.name
+ print "SKIPPING: {0}...".format(self.name)
return
return func(self, *args, **kwargs)
return skip_func
@@ -94,11 +98,8 @@ def push_pop(*directories):
cd(os.path.join(*(("..",)*len(directories))))
-def get_version(repos, nightly):
- if not nightly:
- version = "unknown"
- with push_pop("bitmask_client"):
- version = git("describe", "--tags").strip()
+def get_version(repos, version):
+ if version is not None and version != 'nightly':
return version
m = hashlib.sha256()
@@ -125,31 +126,53 @@ class GitCloneAll(Action):
return "git://github.com/leapcode/{0}".format(repo_name)
@skippable
- def run(self, sorted_repos, nightly):
- print "Cloning repositories..."
+ def run(self, sorted_repos):
+ self.log("cloning repositories...")
cd(self._basedir)
for repo in sorted_repos:
- print "Cloning", repo
+ self.log("cloning {0}".format(repo))
rm("-rf", repo)
git.clone(self._repo_url(repo), repo)
+ self.log("done cloning repos.")
+
+
+class GitCheckout(Action):
+ def __init__(self, basedir, skip, do):
+ Action.__init__(self, "gitcheckout", basedir, skip, do)
+
+ def _repo_url(self, repo_name):
+ if repo_name == "leap_assets":
+ return "git://leap.se/leap_assets"
+ return "git://github.com/leapcode/{0}".format(repo_name)
+
+ @skippable
+ def run(self, sorted_repos, versions_file):
+ self.log("`git checkout` repositories...")
+
+ versions = None
+ with open(versions_file, 'r') as f:
+ versions = json.load(f)
+
+ cd(self._basedir)
+
+ for repo in sorted_repos:
+ if repo not in versions:
+ self.log("skipping {0}, no version specified.".format(repo))
+ continue
+
+ where = versions[repo] # where to checkout
+ self.log("Checkout {0} -> {1}".format(repo, where))
+
with push_pop(repo):
- if repo in ["leap_assets"]:
- # leap_assets only has 'master'
- continue
+ git.fetch()
+ git.checkout("--quiet", where)
- if not nightly:
- git.checkout("master")
- git.pull("--ff-only", "origin", "master")
- git.fetch()
- git.reset("--hard", "origin/master")
- latest_tag = git.describe("--abbrev=0").strip()
- git.checkout("--quiet", latest_tag)
- else:
- git.checkout("develop")
+            # in case we updated an existing clone instead of cloning fresh:
+ git.reset("--hard", where)
- print "Done cloning repos..."
+ self.log("done checking out repos.")
class PythonSetupAll(Action):
@@ -157,11 +180,11 @@ class PythonSetupAll(Action):
Action.__init__(self, "pythonsetup", basedir, skip, do)
def _build_client(self, repo, binaries_path):
- print "Running make on the client..."
+ self.log("running make on the client...")
make()
- print "Running build to get correct version..."
+ self.log("running build to get correct version...")
python("setup.py", "build")
- print "Updating hashes"
+ self.log("updating hashes")
os.environ["OPENVPN_BIN"] = os.path.join(
binaries_path, "openvpn.files", "leap-openvpn")
os.environ["BITMASK_ROOT"] = os.path.join(
@@ -174,10 +197,10 @@ class PythonSetupAll(Action):
for repo in sorted_repos:
if repo in ["bitmask_launcher", "leap_assets"]:
- print "Skipping repo: {0}...".format(repo)
+ self.log("skipping repo: {0}...".format(repo))
continue
- print "Setting up", repo
+ self.log("setting up {0}".format(repo))
if repo == "soledad":
for subrepo in ["common", "client"]:
@@ -210,7 +233,7 @@ class CreateDirStructure(Action):
@skippable
def run(self):
- print "Creating directory structure..."
+ self.log("creating directory structure...")
if IS_MAC:
self._darwin_create_dir_structure()
self._create_dir_structure(os.path.join(self._basedir,
@@ -218,7 +241,7 @@ class CreateDirStructure(Action):
"Contents", "MacOS"))
else:
self._create_dir_structure(self._basedir)
- print "Done"
+ self.log("done.")
def _create_dir_structure(self, basedir):
mkdirp = mkdir.bake("-p")
@@ -247,7 +270,7 @@ class CollectAllDeps(Action):
Action.__init__(self, "collectdeps", basedir, skip, do)
def _remove_unneeded(self, lib_dir):
- print "Removing unneeded files..."
+ self.log("removing unneeded files...")
files = find(lib_dir).strip().splitlines()
keep = ["QtCore.so",
"QtGui.so",
@@ -269,11 +292,11 @@ class CollectAllDeps(Action):
if os.path.split(f)[1] not in keep:
rm("-rf", f)
pass
- print "Done"
+ self.log("done.")
@skippable
def run(self, path_file):
- print "Collecting dependencies..."
+ self.log("collecting dependencies...")
app_py = os.path.join(self._basedir,
"bitmask_client",
"src",
@@ -284,7 +307,7 @@ class CollectAllDeps(Action):
collect_deps(app_py, dest_lib_dir, path_file)
self._remove_unneeded(dest_lib_dir)
- print "Done"
+ self.log("done.")
class CopyBinaries(Action):
@@ -293,7 +316,7 @@ class CopyBinaries(Action):
@skippable
def run(self, binaries_path):
- print "Copying binaries..."
+ self.log("copying binaries...")
dest_lib_dir = platform_dir(self._basedir, "lib")
if IS_MAC:
@@ -364,7 +387,7 @@ class CopyBinaries(Action):
mail_dir = platform_dir(self._basedir, "apps", "mail")
cp(_convert_path_for_win(os.path.join(binaries_path, "gpg")),
_convert_path_for_win(mail_dir))
- print "Done"
+ self.log("done.")
class PLister(Action):
@@ -404,14 +427,14 @@ class PLister(Action):
@skippable
def run(self):
- print "Generating Info.plist file..."
+ self.log("generating Info.plist file...")
file_util.write_file(os.path.join(self._basedir,
"Bitmask",
"Bitmask.app",
"Contents",
"Info.plist"),
self.plist)
- print "Generating qt.conf file..."
+ self.log("generating qt.conf file...")
file_util.write_file(os.path.join(self._basedir,
"Bitmask",
"Bitmask.app",
@@ -419,7 +442,7 @@ class PLister(Action):
"Resources",
"qt.conf"),
self.qtconf)
- print "Done"
+ self.log("done.")
class SeededConfig(Action):
@@ -428,10 +451,10 @@ class SeededConfig(Action):
@skippable
def run(self, seeded_config):
- print "Copying seeded config..."
+ self.log("copying seeded config...")
dir_util.copy_tree(seeded_config,
platform_dir(self._basedir, "config"))
- print "Done"
+ self.log("done.")
class DarwinLauncher(Action):
@@ -455,7 +478,7 @@ class DarwinLauncher(Action):
@skippable
def run(self):
- print "Generating launcher script for OSX..."
+ self.log("generating launcher script for OSX...")
launcher_path = os.path.join(self._basedir,
"Bitmask",
"Bitmask.app",
@@ -466,7 +489,7 @@ class DarwinLauncher(Action):
os.chmod(launcher_path, stat.S_IRGRP | stat.S_IROTH | stat.S_IRUSR
| stat.S_IWGRP | stat.S_IWOTH | stat.S_IWUSR
| stat.S_IXGRP | stat.S_IXOTH | stat.S_IXUSR)
- print "Done"
+ self.log("done.")
class CopyAssets(Action):
@@ -475,7 +498,7 @@ class CopyAssets(Action):
@skippable
def run(self):
- print "Copying assets..."
+ self.log("copying assets...")
resources_dir = os.path.join(self._basedir,
"Bitmask",
"Bitmask.app",
@@ -485,7 +508,7 @@ class CopyAssets(Action):
resources_dir)
cp(os.path.join(self._basedir, "leap_assets", "mac", "bitmask.tiff"),
resources_dir)
- print "Done"
+ self.log("done.")
class CopyMisc(Action):
@@ -494,22 +517,24 @@ class CopyMisc(Action):
updater_delay = 60
[Mirror.localhost]
- url_prefix = http://dl.bitmask.net/tuf""")
+ url_prefix = {0}""")
+ TUF_STABLE = "https://dl.bitmask.net/tuf"
+ TUF_UNSTABLE = "https://dl.bitmask.net/tuf-unstable"
def __init__(self, basedir, skip, do):
Action.__init__(self, "copymisc", basedir, skip, do)
@skippable
- def run(self, binary_path):
- print "Downloading thunderbird extension..."
+ def run(self, binary_path, tuf_repo):
+ self.log("downloading thunderbird extension...")
ext_path = platform_dir(self._basedir, "apps",
"bitmask-thunderbird-latest.xpi")
urllib.urlretrieve(
"https://downloads.leap.se/thunderbird_extension/"
"bitmask-thunderbird-latest.xpi",
ext_path)
- print "Done"
- print "Copying misc files..."
+ self.log("done")
+ self.log("copying misc files...")
apps_dir = _convert_path_for_win(platform_dir(self._basedir, "apps"))
cp(_convert_path_for_win(
os.path.join(self._basedir, "bitmask_launcher", "src",
@@ -537,15 +562,21 @@ class CopyMisc(Action):
_convert_path_for_win(os.path.join(self._basedir, "Bitmask")))
launcher_path = os.path.join(self._basedir, "Bitmask", "launcher.conf")
+
+ if tuf_repo == 'stable':
+ tuf_config = self.TUF_CONFIG.format(self.TUF_STABLE)
+ elif tuf_repo == 'unstable':
+ tuf_config = self.TUF_CONFIG.format(self.TUF_UNSTABLE)
+
with open(launcher_path, "w") as f:
- f.write(self.TUF_CONFIG)
+ f.write(tuf_config)
metadata = os.path.join(self._basedir, "Bitmask", "repo", "metadata")
mkdir("-p", os.path.join(metadata, "current"))
mkdir("-p", os.path.join(metadata, "previous"))
cp(os.path.join(binary_path, "root.json"),
os.path.join(metadata, "current"))
- print "Done"
+ self.log("done")
class FixDylibs(Action):
@@ -563,7 +594,7 @@ class DmgIt(Action):
@skippable
def run(self, repos, nightly):
- print "Dmg'ing it..."
+ self.log("Dmg'ing it...")
cd(self._basedir)
version = get_version(repos, nightly)
dmg_dir = os.path.join(self._basedir, "dmg")
@@ -598,7 +629,7 @@ class DmgIt(Action):
"-imagekey", "zlib-level=9", "-o",
dmg_path)
rm("-f", raw_dmg_path)
- print "Done"
+ self.log("Done")
class TarballIt(Action):
@@ -607,7 +638,7 @@ class TarballIt(Action):
@skippable
def run(self, repos, nightly):
- print "Tarballing it..."
+ self.log("Tarballing it...")
cd(self._basedir)
version = get_version(repos, nightly)
import platform
@@ -615,7 +646,7 @@ class TarballIt(Action):
bundle_name = "Bitmask-linux%s-%s" % (bits, version)
mv("Bitmask", bundle_name)
tar("cjf", bundle_name+".tar.bz2", bundle_name)
- print "Done"
+ self.log("Done")
class PycRemover(Action):
@@ -624,18 +655,18 @@ class PycRemover(Action):
@skippable
def run(self):
- print "Removing .pyc files..."
+ self.log("Removing .pyc files...")
files = find(self._basedir, "-name", "*.pyc").strip().splitlines()
for f in files:
rm(f)
files = find(self._basedir, "-name", "*\\.so*").strip().splitlines()
for f in files:
- print "Stripping", f
+ self.log("Stripping {0}".format(f))
try:
strip(f)
except:
pass
- print "Done"
+ self.log("Done")
class MtEmAll(Action):
@@ -644,7 +675,7 @@ class MtEmAll(Action):
@skippable
def run(self):
- print "Mt'ing all the files..."
+ self.log("Mt'ing all the files...")
cd(os.path.join(self._basedir, "Bitmask"))
subprocess.check_call(
["C:\\Program Files\\Windows Kits\\8.0\\bin\\x86\\mt.exe",
@@ -655,7 +686,7 @@ class MtEmAll(Action):
["C:\\Program Files\\Windows Kits\\8.0\\bin\\x86\\mt.exe",
"-nologo", "-manifest", "openvpn_leap.exe.manifest",
"-outputresource:openvpn_leap.exe;#1"])
- print "Done"
+ self.log("Done")
class ZipIt(Action):
@@ -669,7 +700,7 @@ class ZipIt(Action):
@skippable
def run(self, repos, nightly):
- print "Ziping it..."
+ self.log("Ziping it...")
cd(self._basedir)
version = get_version(repos, nightly)
name = "Bitmask-win32-{0}".format(version)
@@ -678,7 +709,7 @@ class ZipIt(Action):
zf = zipfile.ZipFile("{0}.zip".format(name), "w", zipfile.ZIP_DEFLATED)
self._zipdir(name, zf)
zf.close()
- print "Done"
+ self.log("Done")
class SignIt(Action):
@@ -687,7 +718,7 @@ class SignIt(Action):
@skippable
def run(self, identity):
- print "Signing tuntap kext..."
+ self.log("Signing tuntap kext...")
kext = os.path.join(self._basedir,
"Bitmask",
"Bitmask.app",
@@ -698,8 +729,8 @@ class SignIt(Action):
"Extensions",
"tun.kext")
codesign("-s", identity, "--deep", kext)
- print "Done"
- print "Signing tuntap installer..."
+ self.log("Done")
+ self.log("Signing tuntap installer...")
tuntap_app = os.path.join(self._basedir,
"Bitmask",
"Bitmask.app",
@@ -707,14 +738,14 @@ class SignIt(Action):
"Resources",
"tuntap-installer.app")
codesign("-s", identity, "--deep", tuntap_app)
- print "Done"
- print "Signing main structure, this will take a while..."
+ self.log("Done")
+ self.log("Signing main structure, this will take a while...")
main_app = os.path.join(self._basedir,
"Bitmask",
"Bitmask.app")
- print codesign("-s", identity, "--force",
- "--deep", "--verbose", main_app)
- print "Done"
+ self.log(codesign("-s", identity, "--force",
+ "--deep", "--verbose", main_app))
+ self.log("Done")
class RemoveUnused(Action):
@@ -723,7 +754,7 @@ class RemoveUnused(Action):
@skippable
def run(self):
- print "Removing unused python code..."
+ self.log("Removing unused python code...")
test_dirs = find(self._basedir, "-name", "*test*").strip().splitlines()
for td in test_dirs:
rm("-rf", os.path.join(self._basedir, td))
@@ -731,4 +762,4 @@ class RemoveUnused(Action):
# twisted_used = ["aplication", "conch", "cred",
# "version", "internet", "mail"]
# twisted_files = find(self._basedir, "-name", "t
- print "Done"
+ self.log("Done")
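In isolation, the checkout flow that the new GitCheckout action applies to each repository looks roughly like the sketch below. It reuses the same sh-based git wrapper that actions.py already relies on; pin_repo and pin_all are illustrative helpers rather than part of the module:

# Rough sketch of the per-repository pinning done by GitCheckout, using the
# sh-based wrappers that bundler/actions.py relies on. pin_repo and pin_all
# are illustrative helpers, not part of this commit.
import json
import os

from sh import cd, git


def pin_repo(basedir, repo, where):
    # fetch, check out the requested tag/branch/commit, and hard-reset to it
    # so the flow also works when the repo was cloned on a previous run.
    cd(os.path.join(basedir, repo))
    git.fetch()
    git.checkout("--quiet", where)
    git.reset("--hard", where)
    cd(basedir)


def pin_all(basedir, versions_file):
    with open(versions_file) as f:
        versions = json.load(f)
    for repo, where in versions.items():
        if repo in ("version", "tuf_repo"):
            continue  # metadata entries, not repositories
        pin_repo(basedir, repo, where)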
diff --git a/bundler/main.py b/bundler/main.py
index 326e80b..733a241 100644
--- a/bundler/main.py
+++ b/bundler/main.py
@@ -4,17 +4,18 @@
# - Create complete bundle changelog
import argparse
+import json
import os
import tempfile
from contextlib import contextmanager
from distutils import dir_util
-from actions import GitCloneAll, PythonSetupAll, CreateDirStructure
+from actions import GitCloneAll, GitCheckout, PythonSetupAll
from actions import CollectAllDeps, CopyBinaries, PLister, SeededConfig
from actions import DarwinLauncher, CopyAssets, CopyMisc, FixDylibs
from actions import DmgIt, PycRemover, TarballIt, MtEmAll, ZipIt, SignIt
-from actions import RemoveUnused
+from actions import RemoveUnused, CreateDirStructure
from utils import IS_MAC, IS_WIN
@@ -40,15 +41,54 @@ def new_build_dir(default=None):
dir_util.remove_tree(bd)
+def _get_dict_from_json(json_file):
+ data = {}
+
+ try:
+ with open(json_file, 'r') as f:
+ data = json.load(f)
+ except Exception as e:
+ print "Problem loading json: {0!r}".format(e)
+ pass
+
+ return data
+
+
+def get_version(versions_file):
+ """
+    Return the "version" value from the json file given as parameter.
+
+ :param versions_file: the file name of the json to parse.
+ :type versions_file: str
+
+ :rtype: str or None
+ """
+ versions = _get_dict_from_json(versions_file)
+ return versions.get('version')
+
+
+def get_tuf_repo(versions_file):
+ """
+    Return the "tuf_repo" value from the json file given as parameter.
+
+ :param versions_file: the file name of the json to parse.
+ :type versions_file: str
+
+ :rtype: str or None
+ """
+ versions = _get_dict_from_json(versions_file)
+ return versions.get('tuf_repo')
+
+
def main():
parser = argparse.ArgumentParser(description='Bundle creation tool.')
parser.add_argument('--workon', help="")
parser.add_argument('--skip', nargs="*", default=[], help="")
parser.add_argument('--do', nargs="*", default=[], help="")
parser.add_argument('--paths-file', help="")
+ parser.add_argument('--versions-file', help="")
parser.add_argument('--binaries', help="")
parser.add_argument('--seeded-config', help="")
- parser.add_argument('--nightly', action="store_true", help="")
parser.add_argument('--codesign', default="", help="")
args = parser.parse_args()
@@ -63,6 +103,11 @@ def main():
"specify a binaries path"
binaries_path = os.path.realpath(args.binaries)
+ assert args.versions_file is not None, \
+ "You need to specify a versions file with the versions to use " \
+ "for each package."
+ versions_path = os.path.realpath(args.versions_file)
+
seeded_config = None
if args.seeded_config is not None:
seeded_config = os.path.realpath(args.seeded_config)
@@ -74,7 +119,11 @@ def main():
return t(bd, args.skip, args.do)
gc = init(GitCloneAll)
- gc.run(sorted_repos, args.nightly)
+ gc.run(sorted_repos)
+
+    # check out the version pinned for each repository in the versions file
+ gco = init(GitCheckout)
+ gco.run(sorted_repos, versions_path)
ps = init(PythonSetupAll)
ps.run(sorted_repos, binaries_path)
@@ -100,7 +149,7 @@ def main():
fd.run()
cm = init(CopyMisc)
- cm.run(binaries_path)
+ cm.run(binaries_path, get_tuf_repo(versions_path))
pyc = init(PycRemover)
pyc.run()
@@ -117,17 +166,19 @@ def main():
sc = init(SeededConfig)
sc.run(seeded_config)
+ version = get_version(versions_path)
+
if IS_MAC:
dm = init(DmgIt)
- dm.run(sorted_repos, args.nightly)
+ dm.run(sorted_repos, version)
elif IS_WIN:
zi = init(ZipIt)
- zi.run(sorted_repos, args.nightly)
+ zi.run(sorted_repos, version)
else:
ru = init(RemoveUnused)
ru.run()
ti = init(TarballIt)
- ti.run(sorted_repos, args.nightly)
+ ti.run(sorted_repos, version)
# do manifest on windows
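As a rough usage note for the new helpers (a sketch, assuming it is run from inside the bundler/ directory with the bundler's dependencies installed, so main.py is importable, and that the two versions files added by this commit sit in the repository root):

# What the new helpers return for the versions files added by this commit.
from main import get_version, get_tuf_repo

print get_version("../bitmask-0.7.0rc3.json")   # -> "0.7.0rc3"
print get_tuf_repo("../bitmask-0.7.0rc3.json")  # -> "unstable"
print get_version("../bitmask-nightly.json")    # -> "nightly"
print get_tuf_repo("../bitmask-nightly.json")   # -> None, nightlies set no
                                                #    "tuf_repo" key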
diff --git a/createbundle.sh b/createbundle.sh
index 825fb43..1336263 100755
--- a/createbundle.sh
+++ b/createbundle.sh
@@ -40,6 +40,11 @@
set -e # Exit immediately if a command exits with a non-zero status.
BASE='/home/leap/bitmask.bundle'
+
+SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
+VERSIONS_FILE="$SCRIPT_DIR/bitmask.json"
+BINARY_COPIER="$SCRIPT_DIR/copy-binaries.sh"
+
mkdir -p $BASE
# Note: we could use:
@@ -52,6 +57,7 @@ else
fi
install_dependencies() {
+ sudo apt-get update # update apt, otherwise some packages may not be found
sudo apt-get install -y build-essential python-dev cmake git autoconf \
libtool liblzo2-dev libqt4-dev libxml2-dev libxslt1-dev qtmobility-dev \
libsqlite3-dev libffi-dev python-virtualenv
@@ -120,7 +126,7 @@ set_pyside_environment() {
copy_binaries() {
cd $BASE
- ../copy-binaries.sh
+ $BINARY_COPIER
}
create_bundler_paths() {
@@ -145,9 +151,9 @@ setup_bundler() {
git clone https://github.com/leapcode/bitmask_bundler.git bitmask_bundler.git
virtualenv bundler.venv && source bundler.venv/bin/activate
- # install dependencies by hand...
- pip install psutil
- pip install tuf # used in the launher, it is not in any requirements.txt
+    # HACK: install these dependencies manually since they can't be installed normally because they don't pass some pip checks.
+ pip install --upgrade pip
+ pip install --allow-external u1db --allow-unverified u1db --allow-external dirspec --allow-unverified dirspec u1db dirspec
git clone https://github.com/chiiph/protobuf-socket-rpc protobuf-socket-rpc.git
cd protobuf-socket-rpc.git
@@ -156,28 +162,58 @@ setup_bundler() {
pip install -r $BASE/bitmask_bundler.git/pkg/requirements.pip
}
+clean() {
+ cd $BASE
+ rm -fr bitmask_bundler.git bundler.venv protobuf-socket-rpc.git bundler.output
+}
+
run_bundler() {
cd $BASE
# if the virtualenv is not sourced, then source it!
- # this is helpful if you want to run this step only
+ # this is helpful if you want to run only this step
[[ -z "$VIRTUAL_ENV" ]] && source bundler.venv/bin/activate
set_pyside_environment
mkdir bundler.output
- python bitmask_bundler.git/bundler/main.py --workon bundler.output --binaries binaries/ --paths-file bundler.paths --do gitclone pythonsetup $VERSION
- python bitmask_bundler.git/bundler/main.py --workon bundler.output --binaries binaries/ --paths-file bundler.paths --skip gitclone pythonsetup $VERSION
+
+ # Use a shortcut for the bundler command
+ bundler="python bitmask_bundler.git/bundler/main.py --workon bundler.output --binaries binaries/ --paths-file bundler.paths --versions-file $VERSIONS_FILE"
+
+ $bundler --do gitclone
+ $bundler --do gitcheckout
+
+ $bundler --do pythonsetup
+ $bundler --skip gitclone gitcheckout pythonsetup
}
-[[ "$1" == 'nightly' ]] && VERSION='--nightly'
+REUSE_BINARIES=$BASE/reuse-binaries.lock
+
+if [[ ! -f $BINARY_COPIER ]]; then
+ echo "ERROR: missing $BINARY_COPIER file."
+ exit 1
+fi
+
+if [[ ! -f $VERSIONS_FILE ]]; then
+ echo "ERROR: missing $VERSIONS_FILE file."
+ exit 1
+fi
+
+if [[ ! -f $REUSE_BINARIES ]]; then
+ install_dependencies
+ build_boost
+ build_launcher
+ build_openvpn
+ build_pyside
+ copy_binaries
+ create_bundler_paths
+else
+ echo "Reusing existing binaries, cleaning up before creating a new bundle..."
+ clean
+fi
-install_dependencies
-build_boost
-build_launcher
-build_openvpn
-build_pyside
-copy_binaries
-create_bundler_paths
setup_bundler
run_bundler
+
+echo 'If you remove this file the createbundle.sh script will rebuild all the binaries.' > $REUSE_BINARIES
diff --git a/pkg/requirements.pip b/pkg/requirements.pip
index 60186b8..06d6c02 100644
--- a/pkg/requirements.pip
+++ b/pkg/requirements.pip
@@ -1,3 +1,5 @@
-sh
-modulegraph
altgraph
+modulegraph
+psutil
+sh
+tuf
diff --git a/tuf-stuff.sh b/tuf-stuff.sh
index 8aa83ec..b8de3f3 100755
--- a/tuf-stuff.sh
+++ b/tuf-stuff.sh
@@ -35,7 +35,7 @@ cc_normal="${esc}[39m"
show_help() {
cat << EOF
-Usage: ${0##*/} [-h] [-r FILE] [-s] [-a (32|64)] -v VERSION -k KEY_FILE
+Usage: ${0##*/} [-h] [-r FILE] [-s] [-a (32|64)] -v VERSION -k KEY_FILE -R (S|U)
Do stuff for version VERSION and arch ARCH.
-h display this help and exit.
@@ -44,6 +44,7 @@ Do stuff for version VERSION and arch ARCH.
-r FILE use particular repo/ file to do the tuf stuff. FILE must be a .tar.gz file.
-s run the setup process, create virtualenv and install dependencies.
-v VERSION version to work with. This is a mandatory argument.
+ -R REPO use the (S)table or (U)nstable TUF web repo.
EOF
}
@@ -54,7 +55,7 @@ get_args() {
ARCH="64"
SETUP="NO"
- while getopts "hr:sv:a:k:" opt; do
+ while getopts "hr:sv:a:k:R:" opt; do
case "$opt" in
h)
show_help
@@ -70,6 +71,8 @@ get_args() {
;;
a) ARCH=$OPTARG
;;
+ R) WEB_REPO=$OPTARG
+ ;;
'?')
show_help >&2
exit 1
@@ -88,6 +91,17 @@ get_args() {
show_help
exit 1
fi
+ if [[ -z $WEB_REPO ]]; then
+ echo 'Error: missing -R flag'
+ show_help
+ exit 1
+ else
+ if [[ $WEB_REPO != 'S' && $WEB_REPO != 'U' ]]; then
+ echo 'Error: invalid parameter for the -R flag'
+ show_help
+ exit 2
+ fi
+ fi
echo "---------- settings ----------"
echo "Arch: $ARCH"
@@ -95,6 +109,7 @@ get_args() {
echo "Repo: $REPO"
echo "Setup: $SETUP"
echo "Version: $VERSION"
+ echo "Web repo: $WEB_REPO"
echo "--------------------"
read -p "Press <Enter> to continue, <Ctrl>+C to exit. "
}
@@ -109,7 +124,15 @@ do_init(){
VENVDIR=$WORKDIR/tuf.venv
BITMASK="Bitmask-linux$ARCH-$VERSION"
- RELEASE=$BASE/../bitmask_client/pkg/tuf/release.py
+ RELEASE=$BASE/release.py
+
+ if [[ ! -f $RELEASE ]]; then
+ echo "ERROR: you need to copy the release.py file into this directory."
+ fi
+
+ if [[ ! -f $KEY_FILE ]]; then
+ echo "ERROR: the specified key file does not exist."
+ fi
# Initialize path
mkdir -p $WORKDIR
@@ -141,10 +164,16 @@ do_tuf_stuff() {
TUF_ARCH='linux-i386'
fi
+ if [[ $WEB_REPO == 'S' ]]; then
+ TUF_URL=https://dl.bitmask.net/tuf/$TUF_ARCH/metadata/
+ else
+ TUF_URL=https://dl.bitmask.net/tuf-unstable/$TUF_ARCH/metadata/
+ fi
+
if [[ -z $REPO ]]; then
# Download old repo metadata
echo "${cc_yellow}-> Downloading metadata files from the old bundle...${cc_normal}"
- wget --quiet --recursive --no-host-directories --cut-dirs=2 --no-parent --reject "index.html*" https://dl.bitmask.net/tuf/$TUF_ARCH/metadata/
+ wget --quiet --recursive --no-host-directories --cut-dirs=2 --no-parent --reject "index.html*" $TUF_URL
mv metadata metadata.staged
else
echo "${cc_yellow}-> Extracting metadata files from the repo file...${cc_normal}"
diff --git a/tuf/init.py b/tuf/init.py
new file mode 100755
index 0000000..7300da0
--- /dev/null
+++ b/tuf/init.py
@@ -0,0 +1,102 @@
+#!/usr/bin/env python
+# init.py
+# Copyright (C) 2014 LEAP
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""
+Tool to initialize a TUF repo.
+
+The keys can be generated with:
+ openssl genrsa -des3 -out private.pem 4096
+The public key can be exported with:
+ openssl rsa -in private.pem -outform PEM -pubout -out public.pem
+"""
+
+import sys
+
+from tuf.repository_tool import create_new_repository
+from tuf.repository_tool import import_rsa_privatekey_from_file
+from tuf.repository_tool import import_rsa_publickey_from_file
+
+
+def usage():
+ print ("Usage: %s repo root_private_key root_pub_key targets_pub_key"
+ " timestamp_pub_key") % (sys.argv[0],)
+
+
+def main():
+ if len(sys.argv) < 6:
+ usage()
+ return
+
+ repo_path = sys.argv[1]
+ root_priv_path = sys.argv[2]
+ root_pub_path = sys.argv[3]
+ targets_pub_path = sys.argv[4]
+ timestamp_pub_path = sys.argv[5]
+ repo = Repo(repo_path, root_priv_path)
+ repo.build(root_pub_path, targets_pub_path, timestamp_pub_path)
+
+ print "%s/metadata.staged/root.json is ready" % (repo_path,)
+
+
+class Repo(object):
+ """
+ Repository builder class
+ """
+
+ def __init__(self, repo_path, key_path):
+ """
+ Constructor
+
+ :param repo_path: path where the repo lives
+ :type repo_path: str
+ :param key_path: path where the private root key lives
+ :type key_path: str
+ """
+ self._repo_path = repo_path
+ self._key = import_rsa_privatekey_from_file(key_path)
+
+ def build(self, root_pub_path, targets_pub_path, timestamp_pub_path):
+ """
+ Create a new repo
+
+ :param root_pub_path: path where the public root key lives
+ :type root_pub_path: str
+ :param targets_pub_path: path where the public targets key lives
+ :type targets_pub_path: str
+ :param timestamp_pub_path: path where the public timestamp key lives
+ :type timestamp_pub_path: str
+ """
+ repository = create_new_repository(self._repo_path)
+
+ pub_root_key = import_rsa_publickey_from_file(root_pub_path)
+ repository.root.add_verification_key(pub_root_key)
+ repository.root.load_signing_key(self._key)
+
+ pub_target_key = import_rsa_publickey_from_file(targets_pub_path)
+ repository.targets.add_verification_key(pub_target_key)
+ repository.snapshot.add_verification_key(pub_target_key)
+ repository.targets.compressions = ["gz"]
+ repository.snapshot.compressions = ["gz"]
+
+ pub_timestamp_key = import_rsa_publickey_from_file(timestamp_pub_path)
+ repository.timestamp.add_verification_key(pub_timestamp_key)
+
+ repository.write_partial()
+
+
+if __name__ == "__main__":
+ main()
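A short usage sketch for the new init script; the .pem file names below are placeholders for keys generated with the openssl commands shown in the module docstring. The equivalent command line is python tuf/init.py repo root_private.pem root_public.pem targets_public.pem timestamp_public.pem:

# Usage sketch for tuf/init.py; the .pem names are placeholders.
from init import Repo

repo = Repo("repo", "root_private.pem")
repo.build("root_public.pem", "targets_public.pem", "timestamp_public.pem")
# repo/metadata.staged/root.json is now ready; CopyMisc ships root.json
# inside the bundle under Bitmask/repo/metadata/current.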
diff --git a/tuf/release.py b/tuf/release.py
new file mode 100755
index 0000000..0e1c989
--- /dev/null
+++ b/tuf/release.py
@@ -0,0 +1,135 @@
+#!/usr/bin/env python
+# release.py
+# Copyright (C) 2014 LEAP
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""
+Tool to generate TUF related files after a release
+
+The 'repo' folder should contain two folders:
+ - 'metadata.staged' with all the jsons from the previous release
+ - 'targets' where the release targets are
+"""
+
+import datetime
+import os.path
+import sys
+
+from tuf.repository_tool import load_repository
+from tuf.repository_tool import import_rsa_privatekey_from_file
+
+"""
+Days until the expiration of targets.json and snapshot.json. After this
+amount of days the TUF client won't accept these files.
+"""
+EXPIRATION_DAYS = 90
+
+
+def usage():
+ print "Usage: %s repo key" % (sys.argv[0],)
+
+
+def main():
+ if len(sys.argv) < 3:
+ usage()
+ return
+
+ repo_path = sys.argv[1]
+ key_path = sys.argv[2]
+ targets = Targets(repo_path, key_path)
+ targets.build()
+
+ print "%s/metadata.staged/(targets|snapshot).json[.gz] are ready" % \
+ (repo_path,)
+
+
+class Targets(object):
+ """
+ Targets builder class
+ """
+
+ def __init__(self, repo_path, key_path):
+ """
+ Constructor
+
+ :param repo_path: path where the repo lives
+ :type repo_path: str
+ :param key_path: path where the private targets key lives
+ :type key_path: str
+ """
+ self._repo_path = repo_path
+ self._key = import_rsa_privatekey_from_file(key_path)
+
+ def build(self):
+ """
+ Generate snapshot.json[.gz] and targets.json[.gz]
+ """
+ self._repo = load_repository(self._repo_path)
+ self._load_targets()
+
+ self._repo.targets.load_signing_key(self._key)
+ self._repo.snapshot.load_signing_key(self._key)
+ self._repo.targets.compressions = ["gz"]
+ self._repo.snapshot.compressions = ["gz"]
+ self._repo.snapshot.expiration = (
+ datetime.datetime.now() +
+ datetime.timedelta(days=EXPIRATION_DAYS))
+ self._repo.targets.expiration = (
+ datetime.datetime.now() +
+ datetime.timedelta(days=EXPIRATION_DAYS))
+ self._repo.write_partial()
+
+ def _load_targets(self):
+ """
+ Load a list of targets
+ """
+ targets_path = os.path.join(self._repo_path, 'targets')
+ target_list = self._repo.get_filepaths_in_directory(
+ targets_path,
+ recursive_walk=True,
+ followlinks=True)
+
+ self._remove_obsolete_targets(target_list)
+
+ for target in target_list:
+ octal_file_permissions = oct(os.stat(target).st_mode)[3:]
+ custom_file_permissions = {
+ 'file_permissions': octal_file_permissions
+ }
+ self._repo.targets.add_target(target, custom_file_permissions)
+
+ def _remove_obsolete_targets(self, target_list):
+ """
+ Remove obsolete targets from TUF targets
+
+        :param target_list: list of targets as full paths, coming from TUF's
+            get_filepaths_in_directory
+ :type target_list: list(str)
+ """
+ targets_path = os.path.join(self._repo_path, 'targets')
+ relative_path_list = map(lambda t: t.split("/targets")[1], target_list)
+ removed_targets = (set(self._repo.targets.target_files.keys())
+ - set(relative_path_list))
+
+ for target in removed_targets:
+ target_rel_path = target
+ if target[0] == '/':
+ target_rel_path = target[1:]
+ target_path = os.path.join(targets_path, target_rel_path)
+ self._repo.targets.remove_target(target_path)
+
+
+if __name__ == "__main__":
+ main()
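And a matching usage sketch for release.py; "repo" must contain the layout described in the module docstring, and targets_private.pem is a placeholder for the private targets key:

# Usage sketch for tuf/release.py.
#
#   repo/
#     metadata.staged/   <- jsons from the previous release
#     targets/           <- files of the new release
#
from release import Targets

targets = Targets("repo", "targets_private.pem")
targets.build()
# repo/metadata.staged/(targets|snapshot).json[.gz] are now ready to upload.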