summaryrefslogtreecommitdiff
path: root/lib/thandy
diff options
context:
space:
mode:
authorNick Mathewson <nickm@torproject.org>2008-10-14 05:10:30 +0000
committerNick Mathewson <nickm@torproject.org>2008-10-14 05:10:30 +0000
commitd90990ee3ecd09a2725b8051759a900ebd488b8c (patch)
treefbe68d0c4b698d45bfdcb6c91a43ea2e60c21329 /lib/thandy
parentfb5a6115a6f3ea0216e3ca0645ba1eb31fb02876 (diff)
Rename glider to thandy, based on discussions on #nottor. Please let me know ASAP if there is another program Thandy, or if it means something rude, or whatever.
git-svn-id: file:///home/or/svnrepo/updater/trunk@17085 55e972cd-5a19-0410-ae62-a4d7a52db4cd
Diffstat (limited to 'lib/thandy')
-rw-r--r--lib/thandy/ClientCLI.py66
-rw-r--r--lib/thandy/ServerCLI.py187
-rw-r--r--lib/thandy/SignerCLI.py313
-rw-r--r--lib/thandy/__init__.py35
-rw-r--r--lib/thandy/checkJson.py274
-rw-r--r--lib/thandy/download.py127
-rw-r--r--lib/thandy/formats.py747
-rw-r--r--lib/thandy/keys.py399
-rw-r--r--lib/thandy/master_keys.py5
-rw-r--r--lib/thandy/repository.py313
-rw-r--r--lib/thandy/tests.py64
-rw-r--r--lib/thandy/util.py73
12 files changed, 2603 insertions, 0 deletions
diff --git a/lib/thandy/ClientCLI.py b/lib/thandy/ClientCLI.py
new file mode 100644
index 0000000..702ebbf
--- /dev/null
+++ b/lib/thandy/ClientCLI.py
@@ -0,0 +1,66 @@
+
+import os
+import sys
+import getopt
+
+import thandy.util
+import thandy.repository
+import thandy.download
+
+def update(args):
+    # Entry point for "update": refresh the local repository cache for the
+    # tracked bundles and, unless --no-download is given, fetch whatever
+    # files are missing or out of date.
+    repoRoot = thandy.util.userFilename("cache")
+    options, args = getopt.getopt(args, "", [ "repo=", "no-download" ])
+    download = True
+
+    for o, v in options:
+        if o == '--repo':
+            repoRoot = v
+        elif o == "--no-download":
+            download = False
+
+    repo = thandy.repository.LocalRepository(repoRoot)
+
+    # Remaining positional arguments name the bundles we track.
+    files = repo.getFilesToUpdate(trackingBundles=args)
+
+    if not download:
+        return
+
+    mirrorlist = repo.getMirrorlistFile().get()
+
+    downloader = thandy.download.Downloads()
+    downloader.start()
+
+    for f in files:
+        # XXXX Use hash.
+        dj = thandy.download.DownloadJob(f, repo.getFilename(f),
+                                         mirrorlist)
+        downloader.addDownloadJob(dj)
+        # XXXX replace file in repository if ok; reload; see what changed.
+
+    # Wait for in-progress jobs
+    # NOTE(review): no join/wait is actually performed here; the daemon
+    # worker threads may be killed mid-download when main() returns.
+
+# Check my repository
+
+# Tell me what I need to download
+
+# Download stuff
+
+# Tell me what to install.
+
+def usage():
+    # Print the command summary and exit with a failure status.
+    print "Known commands:"
+    print "  update [--repo=repository] [--no-download]"
+    sys.exit(1)
+
+def main():
+    # Dispatch argv[1] as a subcommand name; the remaining arguments are
+    # passed through to the command function.
+    if len(sys.argv) < 2:
+        usage()
+    cmd = sys.argv[1]
+    args = sys.argv[2:]
+    if cmd in [ "update" ]:
+        # Look the command up by name in this module's globals.
+        globals()[cmd](args)
+    else:
+        usage()
+
+if __name__ == '__main__':
+    main()
diff --git a/lib/thandy/ServerCLI.py b/lib/thandy/ServerCLI.py
new file mode 100644
index 0000000..47fa912
--- /dev/null
+++ b/lib/thandy/ServerCLI.py
@@ -0,0 +1,187 @@
+
+import os
+import sys
+import getopt
+import time
+
+import simplejson
+
+import thandy.formats
+import thandy.util
+import thandy.keys
+
+def tstamp():
+    # Local-time timestamp string used to give backup files unique names.
+    return time.strftime("%Y%m%d_%H%M%S", time.localtime())
+
+def snarf(fname):
+    # Return the raw (binary) contents of the file 'fname'.
+    f = open(fname, 'rb')
+    try:
+        return f.read()
+    finally:
+        f.close()
+
+def snarfObj(fname):
+    # Parse the file 'fname' as JSON and return the decoded object.
+    # Raises ValueError on bad JSON, IOError if the file can't be read.
+    f = open(fname, 'r')
+    try:
+        return simplejson.load(f)
+    finally:
+        f.close()
+
+def insert(args):
+ repo = os.environ.get("THANDY_MASTER_REPO")
+ backupDir = thandy.util.userFilename("old_files")
+ checkSigs = True
+
+ options, args = getopt.getopt(args, "", ["repo=", "no-check"])
+ for o,v in options:
+ if o == "--repo":
+ repo = v
+ elif o == "--no-check":
+ checkSigs = False
+
+ if not repo:
+ print "No repository specified."
+ usage()
+ if not os.path.exists(repo):
+ print "No such repository as %r"%repo
+ usage()
+
+ if not os.path.exists(backupDir):
+ os.makedirs(backupDir, 0700)
+
+ if checkSigs:
+ keys = thandy.util.getKeylist(os.path.join(repo, "meta/keys.txt"))
+ else:
+ keys = None
+
+ n_ok = 0
+ for fn in args:
+ print "Loading %s..."%fn
+ try:
+ content = snarf(fn)
+ except OSError, e:
+ print "Couldn't open %s: %s"%(fn, e)
+ continue
+
+ try:
+ obj = simplejson.loads(content)
+ except ValueError, e:
+ print "Couldn't decode %s: %s"%(fn, e)
+ continue
+
+ try:
+ ss, r, path = thandy.formats.checkSignedObj(obj, keys)
+ except thandy.FormatException, e:
+ print "Bad format on %s: %s"%(fn, e)
+ continue
+ if checkSigs and not ss.isValid():
+ print "Not enough valid signatures on %s"%fn
+ continue
+
+ print " Looks okay. It goes in %s"%path
+ assert path.startswith("/")
+ targetPath = os.path.join(repo, path[1:])
+ if os.path.exists(targetPath):
+ oldContents = snarf(targetPath)
+ if oldContents == content:
+ print " File unchanged!"
+ n_ok += 1
+ continue
+
+ baseFname = "%s_%s" % (tstamp(), os.path.split(path)[1])
+ backupFname = os.path.join(backupDir, baseFname)
+ print " Copying old file to %s"%backupFname
+ thandy.util.replaceFile(backupFname, oldContents)
+
+ parentDir = os.path.split(targetPath)[0]
+ if not os.path.exists(parentDir):
+ print " Making %s"%parentDir
+ os.makedirs(parentDir, 0755)
+ print " Replacing file..."
+ thandy.util.replaceFile(targetPath, content)
+ print " Done."
+ n_ok += 1
+ if n_ok != len(args):
+ sys.exit(1)
+
+def timestamp(args):
+ repo = os.environ.get("THANDY_MASTER_REPO")
+ ts_keyfile = thandy.util.userFilename("timestamp_key")
+
+ options, args = getopt.getopt(args, "", ["repo=", "ts-key="])
+ for o,v in options:
+ if o == "--repo":
+ repo = v
+ elif o == "--ts-key":
+ ts_keyfile = v
+
+ if repo == None:
+ print "No repository specified."
+ usage()
+ if not os.path.exists(repo):
+ print "No such repository as %r"%repo
+ usage()
+
+ tsFname = os.path.join(repo, "meta/timestamp.txt")
+
+ try:
+ mObj = snarfObj(os.path.join(repo, "meta/mirrors.txt"))
+ except OSError:
+ print "No mirror list!"
+ sys.exit(1)
+ try:
+ kObj = snarfObj(os.path.join(repo, "meta/keys.txt"))
+ except OSError:
+ print "No key list!"
+ sys.exit(1)
+
+ bundles = []
+ for dirpath, dirname, fns in os.walk(os.path.join(repo, "bundleinfo")):
+ for fn in fns:
+ try:
+ bObj = snarfObj(fn)
+ except (ValueError, OSError), e:
+ print "(Couldn't read bundle-like %s)"%fn
+ continue
+ try:
+ _, r, _ = thandy.formats.checkSignedObj(bObj)
+ except thandy.FormatException, e:
+ print "Problem reading object from %s"%fn
+ continue
+ if r != "bundle":
+ print "%s was not a good bundle"%fn
+ continue
+ bundles.append(bObj['signed'])
+
+ timestamp = thandy.formats.makeTimestampObj(
+ mObj['signed'], kObj['signed'], bundles)
+ signable = thandy.formats.makeSignable(timestamp)
+
+ keydb = thandy.formats.Keylist()
+ #XXXX Still a roundabout way to do this.
+ keylist = thandy.formats.makeKeylistObj(ts_keyfile, True)
+ keydb.addFromKeylist(keylist)
+ for k in keydb.iterkeys():
+ thandy.formats.sign(signable, k)
+
+ content = simplejson.dumps(signable, sort_keys=True)
+ thandy.util.replaceFile(tsFname, content)
+
+def usage():
+    # Print the command summary and exit with a failure status.
+    print "Known commands:"
+    print "  insert [--no-check] [--repo=repository] file ..."
+    print "  timestamp [--repo=repository]"
+    sys.exit(1)
+
+def main():
+    # Dispatch argv[1] as a subcommand name; remaining arguments are
+    # passed through to the command function.
+    if len(sys.argv) < 2:
+        usage()
+    cmd = sys.argv[1]
+    args = sys.argv[2:]
+    if cmd in [ "insert", "timestamp" ]:
+        # Look the command up by name in this module's globals.
+        globals()[cmd](args)
+    else:
+        usage()
+
+if __name__ == '__main__':
+    main()
diff --git a/lib/thandy/SignerCLI.py b/lib/thandy/SignerCLI.py
new file mode 100644
index 0000000..4facc2e
--- /dev/null
+++ b/lib/thandy/SignerCLI.py
@@ -0,0 +1,313 @@
+
+import getopt
+import logging
+import os
+import sys
+
+import simplejson
+
+import thandy.formats
+import thandy.keys
+import thandy.util
+
+def getKeyStore():
+    # Open the user's private key store (under the thandy user directory).
+    return thandy.keys.KeyStore(thandy.util.userFilename("secret_keys"))
+
+def dumpKey(key, indent=0):
+    # Print a key's ID and its (role, path) pairs, indented by 'indent'.
+    i = " "*indent
+    print "%s%s"%(i, key.getKeyID())
+    for r, p in key.getRoles():
+        print "  %s%s\t%s"%(i, r, p)
+
+def getKey(ks, keyid=None, role=None, path=None):
+    # Select exactly one key from keystore 'ks', by (possibly abbreviated)
+    # key ID, by role/path, or both.  Exits the process with a diagnostic
+    # if no key or more than one key matches.
+    if keyid is not None:
+        keys = ks.getKeysFuzzy(keyid)
+        if None not in (role, path):
+            # Both criteria given: filter the ID matches by role too.
+            keys = [ k for k in keys if k.hasRole(role, path) ]
+    elif None not in (role, path):
+        keys = ks.getKeysByRole(role, path)
+    else:
+        # Callers must supply at least one selection criterion.
+        assert False
+    if len(keys) < 1:
+        print "No such key.\nI wanted",
+        if keyid: print "keyid='%s...'"%keyid,
+        if None not in (role, path): print "role=%s, path=%s"%(role,path),
+        print
+        print "I only know about:"
+        for k in ks.iterkeys():
+            dumpKey(k)
+        sys.exit(1)
+    elif len(keys) > 1:
+        print "Multiple keys match. Possibilities are:"
+        for k in keys:
+            dumpKey(k)
+        sys.exit(1)
+    else:
+        return keys[0]
+
+# ------------------------------
+
+def makepackage(args):
+    # Build a package object from a config file and its data file, sign
+    # it with a matching 'package' key, and write the result next to the
+    # current directory under the package's base filename.
+    options, args = getopt.getopt(args, "", "keyid=")
+    keyid = None
+    for o,v in options:
+        if o == "--keyid":
+            keyid = v
+
+    if len(args) < 2:
+        usage()
+
+    configFile = args[0]
+    dataFile = args[1]
+    print "Generating package."
+    package = thandy.formats.makePackageObj(configFile, dataFile)
+    relpath = package['location']
+    print "need a key with role matching [package %s]"%relpath
+    ks = getKeyStore()
+    ks.load()
+    key = getKey(ks, keyid=keyid, role='package', path=relpath)
+    signable = thandy.formats.makeSignable(package)
+    thandy.formats.sign(signable, key)
+
+    if 1:
+        # Sanity-check our own signature before writing anything out.
+        ss, r, p = thandy.formats.checkSignedObj(signable, ks)
+        assert ss.isValid()
+
+    location = os.path.split(package['location'])[-1]
+    print "Writing signed package to %s"%location
+    f = open(location, 'w')
+    simplejson.dump(signable, f, indent=1)
+    f.close()
+
+def makebundle(args):
+ options, args = getopt.getopt(args, "", "keyid=")
+ keyid = None
+ for o,v in options:
+ if o == "--keyid":
+ keyid = v
+
+ if len(args) < 2:
+ usage()
+
+ configFile = args[0]
+ packages = {}
+ for pkgFile in args[1:]:
+ print "Loading", pkgFile
+ f = open(pkgFile, 'r')
+ p = simplejson.load(f)
+ f.close()
+ _, r, _ = thandy.formats.checkSignedObj(p)
+ if r != 'package':
+ print pkgFile, "was not a package"
+ packages[p['signed']['location']] = p
+
+ def getHash(path):
+ p = packages[path]
+ return thandy.formats.getDigest(p['signed'])
+
+ bundleObj = thandy.formats.makeBundleObj(configFile, getHash)
+ signable = thandy.formats.makeSignable(bundleObj)
+
+ ks = getKeyStore()
+ ks.load()
+ key = getKey(ks, keyid=keyid, role="bundle", path=bundleObj['location'])
+ thandy.formats.sign(signable, key)
+
+ if 1:
+ ss, r, p = thandy.formats.checkSignedObj(signable, ks)
+ assert ss.isValid()
+
+ location = os.path.split(bundleObj['location'])[-1]
+ print "Writing signed bundle to %s"%location
+ f = open(location, 'w')
+ simplejson.dump(signable, f, indent=1)
+ f.close()
+
+# ------------------------------
+def makekeylist(args):
+    # Build a keylist object from a config file, sign it with a master
+    # key, and write the result to ./keys.txt.
+    options, args = getopt.getopt(args, "", "keyid=")
+    keyid = None
+    for o,v in options:
+        if o == "--keyid":
+            keyid = v
+
+    if len(args) < 1:
+        usage()
+
+    keylist = thandy.formats.makeKeylistObj(args[0])
+    signable = thandy.formats.makeSignable(keylist)
+
+    ks = getKeyStore()
+    ks.load()
+    key = getKey(ks, keyid=keyid, role="master", path="/meta/keys.txt")
+    thandy.formats.sign(signable, key)
+
+    if 1:
+        # Sanity-check our own signature before writing anything out.
+        ss, r, p = thandy.formats.checkSignedObj(signable, ks)
+        assert ss.isValid()
+
+    print "writing signed keylist to keys.txt"
+    thandy.util.replaceFile("keys.txt",
+              simplejson.dumps(signable, indent=1, sort_keys=True),
+              textMode=True)
+
+def signkeylist(args):
+    # Add our master-key signature(s) to an existing signed keylist file
+    # and write the result to ./keys.txt.
+    if len(args) != 1:
+        usage()
+
+    keylist = simplejson.load(open(args[0], 'r'))
+    # Validate the envelope and the signed body before signing anything.
+    thandy.formats.SIGNED_SCHEMA.checkMatch(keylist)
+    thandy.formats.KEYLIST_SCHEMA.checkMatch(keylist['signed'])
+
+    ks = getKeyStore()
+    ks.load()
+    keys = ks.getKeysByRole("master", "/meta/keys.txt")
+    for k in keys:
+        thandy.formats.sign(keylist, k)
+
+    print "writing signed keylist to keys.txt"
+    thandy.util.replaceFile("keys.txt",
+              simplejson.dumps(keylist, indent=1, sort_keys=True),
+              textMode=True)
+
+def makemirrorlist(args):
+    # Build a mirror-list object from a config file, sign it with the
+    # 'mirrors' key, and write the result to ./mirrors.txt.
+    options, args = getopt.getopt(args, "", "keyid=")
+    keyid = None
+    for o,v in options:
+        if o == "--keyid":
+            keyid = v
+
+    if len(args) < 1:
+        usage()
+
+    mirrorlist = thandy.formats.makeMirrorListObj(args[0])
+    signable = thandy.formats.makeSignable(mirrorlist)
+
+    ks = getKeyStore()
+    ks.load()
+    key = getKey(ks, keyid=keyid, role='mirrors', path="/meta/mirrors.txt")
+    thandy.formats.sign(signable, key)
+
+    if 1:
+        # Sanity-check our own signature before writing anything out.
+        ss, r, p = thandy.formats.checkSignedObj(signable, ks)
+        assert ss.isValid()
+
+    print "writing signed mirrorlist to mirrors.txt"
+    thandy.util.replaceFile("mirrors.txt",
+              simplejson.dumps(signable, indent=1, sort_keys=True),
+              textMode=True)
+
+# ------------------------------
+
+def keygen(args):
+    # Generate a fresh RSA key and add it to the user's key store.
+    k = getKeyStore()
+    k.load()
+    print "Generating key. This will be slow."
+    key = thandy.keys.RSAKey.generate()
+    print "Generated new key: %s" % key.getKeyID()
+    k.addKey(key)
+    k.save()
+
+def listkeys(args):
+ k = getKeyStore()
+ k.load()
+ for k in k.iterkeys():
+ print k.getKeyID()
+ for r, p in k.getRoles():
+ print " ", r, p
+
+def addrole(args):
+ if len(args) < 3:
+ usage()
+ ks = getKeyStore()
+ ks.load()
+ k = getKey(ks, args[0])
+ r = args[1]
+ if r not in thandy.formats.ALL_ROLES:
+ print "Unrecognized role %r. Known roles are %s"%(
+ r,", ".join(thandy.format.ALL_ROLES))
+ sys.exit(1)
+ p = args[2]
+ k.addRole(r, p)
+ ks.save()
+
+def delrole(args):
+ if len(args) < 3:
+ usage()
+ ks = getKeyStore()
+ ks.load()
+ k = getKey(ks, args[0])
+ r = args[1]
+ if r not in thandy.formats.ALL_ROLES:
+ print "Unrecognized role %r. Known roles are %s"%(
+ r,", ".join(thandy.format.ALL_ROLES))
+ sys.exit(1)
+ p = args[2]
+
+ #XXXX rep.
+ origLen = len(k._roles)
+ k._roles = [ (role,path) for role,path in k._roles
+ if (role,path) != (r,p) ]
+ removed = origLen - len(k._roles)
+ print removed, "roles removed"
+ if removed:
+ ks.save()
+
+def chpass(args):
+    # Re-encrypt the key store under a new password: load with the old
+    # one, forget it, and save (prompting for the new one).
+    ks = getKeyStore()
+    print "Old password."
+    ks.load()
+    print "New password."
+    ks.clearPassword()
+    ks.save()
+
+def dumpkey(args):
+    # Print the formatted contents (optionally including secret parts)
+    # of the named keys, or of every key when no IDs are given.
+    options, args = getopt.getopt(args, "", ["include-secret", "passwd="])
+
+    includeSecret = False
+    for o,v in options:
+        if o == '--include-secret':
+            includeSecret = True
+        else:
+            # NOTE(review): "--passwd" is accepted by getopt above but
+            # lands here and is reported as unexpected -- confirm intent.
+            print "Unexpected %r"%o
+
+    ks = getKeyStore()
+    ks.load()
+
+    keys = []
+    if len(args):
+        keys = [ getKey(ks, a) for a in args ]
+    else:
+        keys = list(ks.iterkeys())
+
+    for k in keys:
+        data = k.format(private=includeSecret, includeRoles=True)
+        print "Key(", simplejson.dumps(data, indent=2), ")"
+
+def usage():
+    # Print the command summary and exit with a failure status.
+    print "Known commands:"
+    print "  keygen"
+    print "  listkeys"
+    print "  chpass"
+    print "  addrole keyid role path"
+    print "  delrole keyid role path"
+    print "  dumpkey [--include-secret] keyid"
+    print "  makepackage config datafile"
+    print "  makebundle config packagefile ..."
+    print "  signkeylist keylist"
+    print "  makekeylist keylist"
+    print "  makemirrorlist config"
+    sys.exit(1)
+
+def main():
+ if len(sys.argv) < 2:
+ usage()
+ cmd = sys.argv[1]
+ args = sys.argv[2:]
+ if cmd in [ "keygen", "listkeys", "addrole", "delrole", "chpass",
+ "dumpkey", "makepackage", "makebundle", "signkeylist",
+ "makekeylist", "signkeylist", "makemirrorlist", ]:
+ globals()[cmd](args)
+ else:
+ usage()
+
+if __name__ == '__main__':
+ main()
diff --git a/lib/thandy/__init__.py b/lib/thandy/__init__.py
new file mode 100644
index 0000000..87fd983
--- /dev/null
+++ b/lib/thandy/__init__.py
@@ -0,0 +1,35 @@
+
+__all__ = [ 'formats' ]
+
+# Keep a reference to the builtin Exception type before we deliberately
+# shadow it with the package-level Exception class below.
+_BaseException = Exception
+
+class Exception(_BaseException):
+    """Base class for all errors raised by thandy code."""
+    pass
+
+class FormatException(Exception):
+    """An object does not match its expected format or schema."""
+    pass
+
+class UnknownFormat(FormatException):
+    """An object's format is not recognized at all."""
+    pass
+
+class BadSignature(Exception):
+    """A signature is missing, corrupt, or made with the wrong key."""
+    pass
+
+class BadPassword(Exception):
+    """The user supplied an incorrect password."""
+    pass
+
+class InternalError(Exception):
+    """An error that indicates a bug rather than bad input."""
+    pass
+
+class RepoError(InternalError):
+    """The local repository is in an inconsistent or unusable state."""
+    pass
+
+class CryptoError(Exception):
+    """A cryptographic operation failed."""
+    pass
+
+class PubkeyFormatException(FormatException):
+    """A public key is malformed."""
+    pass
+
+class UnknownMethod(CryptoError):
+    """A signature method is not recognized."""
+    pass
+
diff --git a/lib/thandy/checkJson.py b/lib/thandy/checkJson.py
new file mode 100644
index 0000000..e6b6c6e
--- /dev/null
+++ b/lib/thandy/checkJson.py
@@ -0,0 +1,274 @@
+
+import re
+import sys
+
+import thandy
+
+class Schema:
+ def matches(self, obj):
+ try:
+ self.checkMatch(obj)
+ except thandy.FormatException:
+ return False
+ else:
+ return True
+
+ def checkMatch(self, obj):
+ raise NotImplemented()
+
+class Any(Schema):
+    """Schema that matches every object.
+
+    >>> s = Any()
+    >>> s.matches("A String")
+    True
+    >>> s.matches([1, "list"])
+    True
+    """
+    def checkMatch(self, obj):
+        pass
+
+class RE(Schema):
+    """Schema matching any string that matches a regular expression in
+    full (a trailing "$" is appended to the pattern if absent).
+
+    >>> s = RE("h.*d")
+    >>> s.matches("hello world")
+    True
+    >>> s.matches("Hello World")
+    False
+    >>> s.matches("hello world!")
+    False
+    >>> s.matches([33, "Hello"])
+    False
+    """
+    def __init__(self, pat=None, modifiers=0, reObj=None, reName="pattern"):
+        # Either a pattern string or a precompiled regex may be given;
+        # 'reName' is only used in error messages.
+        if not reObj:
+            if not pat.endswith("$"):
+                pat += "$"
+            reObj = re.compile(pat, modifiers)
+        self._re = reObj
+        self._reName = reName
+    def checkMatch(self, obj):
+        if not isinstance(obj, basestring) or not self._re.match(obj):
+            raise thandy.FormatException("%r did not match %s"
+                                         %(obj,self._reName))
+
+class Str(Schema):
+ """
+ >>> s = Str("Hi")
+ >>> s.matches("Hi")
+ True
+ >>> s.matches("Not hi")
+ False
+ """
+ def __init__(self, val):
+ self._str = val
+ def checkMatch(self, obj):
+ if self._str != obj:
+ raise thandy.FormatException("Expected %r; got %r"%(self._str, obj))
+
+class AnyStr(Schema):
+    """Schema matching any string (str or unicode).
+
+    >>> s = AnyStr()
+    >>> s.matches("")
+    True
+    >>> s.matches("a string")
+    True
+    >>> s.matches(["a"])
+    False
+    >>> s.matches(3)
+    False
+    >>> s.matches(u"a unicode string")
+    True
+    >>> s.matches({})
+    False
+    """
+    def __init__(self):
+        pass
+    def checkMatch(self, obj):
+        if not isinstance(obj, basestring):
+            raise thandy.FormatException("Expected a string; got %r"%obj)
+
+class ListOf(Schema):
+    """Schema matching a list (or tuple) whose items all match a given
+    sub-schema, with optional length bounds.
+
+    >>> s = ListOf(RE("(?:..)*"))
+    >>> s.matches("hi")
+    False
+    >>> s.matches([])
+    True
+    >>> s.matches({})
+    False
+    >>> s.matches(["Hi", "this", "list", "is", "full", "of", "even", "strs"])
+    True
+    >>> s.matches(["This", "one", "is not"])
+    False
+    """
+    def __init__(self, schema, minCount=0, maxCount=sys.maxint,listName="list"):
+        # 'listName' is only used in error messages.
+        self._schema = schema
+        self._minCount = minCount
+        self._maxCount = maxCount
+        self._listName = listName
+    def checkMatch(self, obj):
+        if not isinstance(obj, (list, tuple)):
+            raise thandy.FormatException("Expected %s; got %r"
+                                         %(self._listName,obj))
+        for item in obj:
+            try:
+                self._schema.checkMatch(item)
+            except thandy.FormatException, e:
+                # Re-raise with the list name for a clearer diagnostic.
+                raise thandy.FormatException("%s in %s"%(e, self._listName))
+
+        if not (self._minCount <= len(obj) <= self._maxCount):
+            raise thandy.FormatException("Length of %s out of range"
+                                         %self._listName)
+
+class Struct(Schema):
+    """Schema matching a fixed-length list whose items match a fixed
+    sequence of sub-schemas, position by position.  With allowMore, extra
+    trailing items are permitted and left unchecked.
+
+    >>> s = Struct([ListOf(AnyStr()), AnyStr(), Str("X")])
+    >>> s.matches(False)
+    False
+    >>> s.matches("Foo")
+    False
+    >>> s.matches([[], "Q", "X"])
+    True
+    >>> s.matches([[], "Q", "D"])
+    False
+    >>> s.matches([[3], "Q", "X"])
+    False
+    >>> s.matches([[], "Q", "X", "Y"])
+    False
+    """
+    def __init__(self, subschemas, allowMore=False, structName="list"):
+        # Copy the schema list so later mutation by the caller is harmless.
+        self._subschemas = subschemas[:]
+        self._allowMore = allowMore
+        self._structName = structName
+    def checkMatch(self, obj):
+        if not isinstance(obj, (list, tuple)):
+            raise thandy.FormatException("Expected %s; got %r"
+                                         %(self._structName,obj))
+        elif len(obj) < len(self._subschemas):
+            raise thandy.FormatException(
+                "Too few fields in %s"%self._structName)
+        elif len(obj) > len(self._subschemas) and not self._allowMore:
+            raise thandy.FormatException(
+                "Too many fields in %s"%self._structName)
+        for item, schema in zip(obj, self._subschemas):
+            schema.checkMatch(item)
+
+class DictOf(Schema):
+ """
+ >>> s = DictOf(RE(r'[aeiou]+'), Struct([AnyStr(), AnyStr()]))
+ >>> s.matches("")
+ False
+ >>> s.matches({})
+ True
+ >>> s.matches({"a": ["x", "y"], "e" : ["", ""]})
+ True
+ >>> s.matches({"a": ["x", 3], "e" : ["", ""]})
+ False
+ >>> s.matches({"a": ["x", "y"], "e" : ["", ""], "d" : ["a", "b"]})
+ False
+ """
+ def __init__(self, keySchema, valSchema):
+ self._keySchema = keySchema
+ self._valSchema = valSchema
+ def checkMatch(self, obj):
+ try:
+ iter = obj.iteritems()
+ except AttributeError:
+ raise thandy.FormatException("Expected a dict; got %r"%obj)
+
+ for k,v in iter:
+ self._keySchema.checkMatch(k)
+ self._valSchema.checkMatch(v)
+
+class Opt:
+    """Helper; applied to a value in Obj to mark it optional.
+
+    Not itself a Schema subclass: Obj tests isinstance(schema, Opt) to
+    decide whether a missing key is an error.
+
+    >>> s = Obj(k1=Str("X"), k2=Opt(Str("Y")))
+    >>> s.matches({'k1': "X", 'k2': "Y"})
+    True
+    >>> s.matches({'k1': "X", 'k2': "Z"})
+    False
+    >>> s.matches({'k1': "X"})
+    True
+    """
+    def __init__(self, schema):
+        self._schema = schema
+    def checkMatch(self, obj):
+        # If the key is present, its value must still match the schema.
+        self._schema.checkMatch(obj)
+
+class Obj(Schema):
+    """Schema matching a dict with a given set of keys; extra keys are
+    allowed, and keys wrapped in Opt may be absent.
+
+    >>> s = Obj(a=AnyStr(), bc=Struct([Int(), Int()]))
+    >>> s.matches({'a':"ZYYY", 'bc':[5,9]})
+    True
+    >>> s.matches({'a':"ZYYY", 'bc':[5,9], 'xx':5})
+    True
+    >>> s.matches({'a':"ZYYY", 'bc':[5,9,3]})
+    False
+    >>> s.matches({'a':"ZYYY"})
+    False
+
+    """
+    def __init__(self, _objname="object", **d):
+        # '_objname' is only used in error messages.
+        self._objname = _objname
+        self._required = d.items()
+
+
+    def checkMatch(self, obj):
+        # NOTE(review): a non-dict obj makes obj[k] raise TypeError,
+        # which is not converted to FormatException -- confirm intended.
+        for k,schema in self._required:
+            try:
+                item = obj[k]
+            except KeyError:
+                # Missing keys are fine only for Opt-wrapped schemas.
+                if not isinstance(schema, Opt):
+                    raise thandy.FormatException("Missing key %s in %s"
+                                                 %(k,self._objname))
+
+            else:
+                try:
+                    schema.checkMatch(item)
+                except thandy.FormatException, e:
+                    raise thandy.FormatException("%s in %s.%s"
+                                                 %(e,self._objname,k))
+
+
+class Int(Schema):
+    """Schema matching an integer (int or long, but not bool), optionally
+    restricted to an inclusive range.
+
+    >>> s = Int()
+    >>> s.matches(99)
+    True
+    >>> s.matches(False)
+    False
+    >>> s.matches(0L)
+    True
+    >>> s.matches("a string")
+    False
+    >>> Int(lo=10, hi=30).matches(25)
+    True
+    >>> Int(lo=10, hi=30).matches(5)
+    False
+    """
+    def __init__(self, lo=-sys.maxint, hi=sys.maxint):
+        # Inclusive bounds.
+        self._lo = lo
+        self._hi = hi
+    def checkMatch(self, obj):
+        if isinstance(obj, bool) or not isinstance(obj, (int, long)):
+            # We need to check for bool as a special case, since bool
+            # is for historical reasons a subtype of int.
+            raise thandy.FormatException("Got %r instead of an integer"%obj)
+        elif not (self._lo <= obj <= self._hi):
+            raise thandy.FormatException("%r not in range [%r,%r]"
+                                         %(obj, self._lo, self._hi))
+
+class Bool(Schema):
+ """
+ >>> s = Bool()
+ >>> s.matches(True) and s.matches(False)
+ True
+ >>> s.matches(11)
+ False
+ """
+ def __init__(self):
+ pass
+ def checkMatch(self, obj):
+ if not isinstance(obj, bool):
+ raise thandy.FormatException("Got %r instead of a boolean"%obj)
diff --git a/lib/thandy/download.py b/lib/thandy/download.py
new file mode 100644
index 0000000..0b22cfa
--- /dev/null
+++ b/lib/thandy/download.py
@@ -0,0 +1,127 @@
+
+
+import httplib
+import os
+import random
+import urllib2
+
+import threading, Queue
+
+import thandy.formats
+import thandy.util
+
+class Downloads:
+ def __init__(self, n_threads=2):
+ self._lock = threading.RLock()
+ self.downloads = {}
+ self.haveDownloaded = {}
+ self.downloadQueue = Queue.Queue()
+ self.threads = [ threading.Thread(target=self._thread) ]
+ for t in self.threads:
+ t.setDaemon(True)
+
+ def start(self):
+ for t in self.threads:
+ t.start()
+
+ def isCurrentlyDownloading(self, relPath):
+ self._lock.acquire()
+ try:
+ return self.downloads.has_key(relPath)
+ finally:
+ self._lock.release()
+
+ def isRedundant(self, relPath):
+ self._lock.acquire()
+ try:
+ return (self.downloads.has_key(relPath) or
+ self.haveDownloaded.has_key(relPath))
+ finally:
+ self._lock.release()
+
+ def addDownloadJob(self, job):
+ rp = job.getRelativePath()
+ self._lock.acquire()
+ self.downloads[rp] = job
+ self._lock.release()
+ self.downloadQueue.put(job)
+
+ def _thread(self):
+ while True:
+ job = self.downloadQueue.get()
+ job.download()
+ rp = job.getRelativePath()
+ self._lock.acquire()
+ try:
+ del self.downloads[rp]
+ self.haveDownloaded[rp] = True
+ finally:
+ self._lock.release()
+
+class DownloadJob:
+ def __init__(self, relPath, destPath, mirrorlist=None,
+ wantHash=None, canStall=False):
+ self._relPath = relPath
+ self._wantHash = wantHash
+ self._mirrorList = mirrorlist
+ self._destPath = destPath
+
+ tmppath = thandy.util.userFilename("tmp")
+ if relPath.startswith("/"):
+ relPath = relPath[1:]
+ self._tmppath = os.path.join(tmppath, relPath)
+
+ d = os.path.dirname(self._tmppath)
+ if not os.path.exists(d):
+ os.makedirs(d, 0700)
+
+ def getRelativePath(self):
+ return self._relPath
+
+ def haveStalledFile(self):
+ return os.path.exists(self._tmppath)
+
+ def getURL(self, mirrorlist=None):
+ if mirrorlist is None:
+ mirrorlist = self._mirrorList
+ weightSoFar = 0
+ usable = []
+
+ for m in mirrorlist['mirrors']:
+ for c in m['contents']:
+ # CHECK FOR URL SUITABILITY XXXXX
+
+ if thandy.formats.rolePathMatches(c, self._relPath):
+ weightSoFar += m['weight']
+ usable.append( (weightSoFar, m) )
+ break
+
+ wTarget = random.randint(0, weightSoFar)
+ mirror = None
+ # Could use bisect here instead
+ for w, m in mirrorlist:
+ if w >= wTarget:
+ mirror = m
+ break
+
+ return m['urlbase'] + self._relPath
+
+ def download(self):
+ # XXXX RESUME
+
+ f_in = urllib2.urlopen(self.getURL())
+ f_out = open(self._tmpPath, 'w')
+ while True:
+ c = f_in.read(1024)
+ if not c:
+ break
+ f_out.write(c)
+ f_in.close()
+ f_out.close()
+ # XXXXX retry on failure
+
+ if self._wantHash:
+ gotHash = thandy.formats.getFileDigest(self._tmpPath)
+ if gotHash != self._wantHash:
+ # XXXX Corrupt file.
+ pass
+
+ thandy.utils.moveFile(self._tmpPath, self._destPath)
diff --git a/lib/thandy/formats.py b/lib/thandy/formats.py
new file mode 100644
index 0000000..42d1086
--- /dev/null
+++ b/lib/thandy/formats.py
@@ -0,0 +1,747 @@
+
+import simplejson
+import time
+import re
+import binascii
+import calendar
+
+import thandy.checkJson
+
+import Crypto.Hash.SHA256
+
+class KeyDB:
+ """A KeyDB holds public keys, indexed by their key IDs."""
+ def __init__(self):
+ self._keys = {}
+ def addKey(self, k):
+ keyid = k.getKeyID()
+ try:
+ oldkey = self._keys[keyid]
+ for r, p in oldkey.getRoles():
+ if (r, p) not in k.getRoles():
+ k.addRole(r,p)
+ except KeyError:
+ pass
+ self._keys[k.getKeyID()] = k
+ def getKey(self, keyid):
+ return self._keys[keyid]
+ def getKeysByRole(self, role, path):
+ results = []
+ for key in self._keys.itervalues():
+ for r,p in key.getRoles():
+ if r == role:
+ if rolePathMatches(p, path):
+ results.append(key)
+ return results
+
+ def getKeysFuzzy(self, keyid):
+ r = []
+ for k,v in self._keys.iteritems():
+ if k.startswith(keyid):
+ r.append(v)
+ return r
+ def iterkeys(self):
+ return self._keys.itervalues()
+
+_rolePathCache = {}
+def rolePathMatches(rolePath, path):
+ """Return true iff the relative path in the filesystem 'path' conforms
+ to the pattern 'rolePath': a path that a given key is
+ authorized to sign. Patterns are allowed to contain * to
+ represent one or more characters in a filename, and ** to
+ represent any level of directory structure.
+
+ >>> rolePathMatches("a/b/c/", "a/b/c/")
+ True
+ >>> rolePathMatches("**/c.*", "a/b/c.txt")
+ True
+ >>> rolePathMatches("**/c.*", "a/b/ctxt")
+ False
+ >>> rolePathMatches("**/c.*", "a/b/c.txt/foo")
+ False
+ >>> rolePathMatches("a/*/c", "a/b/c")
+ True
+ >>> rolePathMatches("a/*/c", "a/b/c.txt")
+ False
+ >>> rolePathMatches("a/*/c", "a/b/c.txt") #Check cache
+ False
+ """
+ try:
+ regex = _rolePathCache[rolePath]
+ except KeyError:
+ orig = rolePath
+ # remove duplicate slashes.
+ rolePath = re.sub(r'/+', '/', rolePath)
+ # escape, then ** becomes .*
+ rolePath = re.escape(rolePath).replace(r'\*\*', r'.*')
+ # * becomes [^/]*
+ rolePath = rolePath.replace(r'\*', r'[^/]*')
+ # and no extra text is allowed.
+ rolePath += "$"
+ regex = _rolePathCache[orig] = re.compile(rolePath)
+ return regex.match(path) != None
+
+class SignatureStatus:
+ """Represents the outcome of checking signature(s) on an object."""
+ def __init__(self, good, bad, unrecognized, unauthorized):
+ # keyids for all the valid signatures
+ self._good = good[:]
+ # keyids for the invalid signatures (we had the key, and it failed).
+ self._bad = bad[:]
+ # keyids for signatures where we didn't recognize the key
+ self._unrecognized = unrecognized[:]
+ # keyids for signatures where we recognized the key, but it doesn't
+ # seem to be allowed to sign this kind of document.
+ self._unauthorized = unauthorized[:]
+
+ def isValid(self, threshold=1):
+ """Return true iff we got at least 'threshold' good signatures."""
+ return len(self._good) >= threshold
+
+ def mayNeedNewKeys(self):
+ """Return true iff downloading a new set of keys might tip this
+ signature status over to 'valid.'"""
+ return len(self._unrecognized) or len(self._unauthorized)
+
+def checkSignatures(signed, keyDB, role=None, path=None):
+ """Given an object conformant to SIGNED_SCHEMA and a set of public keys
+ in keyDB, verify the signed object in 'signed'."""
+
+ SIGNED_SCHEMA.checkMatch(signed)
+
+ goodSigs = []
+ badSigs = []
+ unknownSigs = []
+ tangentialSigs = []
+
+ signable = signed['signed']
+ signatures = signed['signatures']
+
+ d_obj = Crypto.Hash.SHA256.new()
+ getDigest(signable, d_obj)
+ digest = d_obj.digest()
+
+ for signature in signatures:
+ sig = signature['sig']
+ keyid = signature['keyid']
+ method = signature['method']
+
+ try:
+ key = keyDB.getKey(keyid)
+ except KeyError:
+ unknownSigs.append(keyid)
+ continue
+
+ try:
+ result = key.checkSignature(method, sig, digest=digest)
+ except thandy.UnknownMethod:
+ continue
+
+ if result == True:
+ if role is not None:
+ for r,p in key.getRoles():
+ if r == role and rolePathMatches(p, path):
+ break
+ else:
+ tangentialSigs.append(sig)
+ continue
+
+ goodSigs.append(keyid)
+ else:
+ badSigs.append(keyid)
+
+ return SignatureStatus(goodSigs, badSigs, unknownSigs, tangentialSigs)
+
+def encodeCanonical(obj, outf=None):
+ """Encode the object obj in canoncial JSon form, as specified at
+ http://wiki.laptop.org/go/Canonical_JSON . It's a restricted
+ dialect of json in which keys are always lexically sorted,
+ there is no whitespace, floats aren't allowed, and only quote
+ and backslash get escaped. The result is encoded in UTF-8,
+ and the resulting bits are passed to outf (if provided), or joined
+ into a string and returned.
+
+ >>> encodeCanonical("")
+ '""'
+ >>> encodeCanonical([1, 2, 3])
+ '[1,2,3]'
+ >>> encodeCanonical({"x" : 3, "y" : 2})
+ '{"x":3,"y":2}'
+ """
+ def default(o):
+ raise TypeError("Can't encode %r", o)
+ def floatstr(o):
+ raise TypeError("Floats not allowed.")
+ def canonical_str_encoder(s):
+ return '"%s"' % re.sub(r'(["\\])', r'\\\1', s)
+
+ # XXX This is, alas, a hack. I'll submit a canonical JSon patch to
+ # the simplejson folks.
+
+ iterator = simplejson.encoder._make_iterencode(
+ None, default, canonical_str_encoder, None, floatstr,
+ ":", ",", True, False, True)(obj, 0)
+
+ result = None
+ if outf == None:
+ result = [ ]
+ outf = result.append
+
+ for u in iterator:
+ outf(u.encode("utf-8"))
+ if result is not None:
+ return "".join(result)
+
+def getDigest(obj, digestObj=None):
+ """Update 'digestObj' (typically a SHA256 object) with the digest of
+ the canonical json encoding of obj. If digestObj is none,
+ compute the SHA256 hash and return it.
+
+ DOCDOC string equivalence.
+ """
+ useTempDigestObj = (digestObj == None)
+ if useTempDigestObj:
+ digestObj = Crypto.Hash.SHA256.new()
+
+ if isinstance(obj, str):
+ digestObj.update(obj)
+ elif isinstance(obj, unicode):
+ digestObj.update(obj.encode("utf-8"))
+ else:
+ encodeCanonical(obj, digestObj.update)
+
+ if useTempDigestObj:
+ return digestObj.digest()
+
+def getFileDigest(f, digestObj=None):
+ """Update 'digestObj' (typically a SHA256 object) with the digest of
+ the file object in f. If digestObj is none, compute the SHA256
+ hash and return it.
+
+ >>> s = "here is a long string"*1000
+ >>> import cStringIO, Crypto.Hash.SHA256
+ >>> h1 = Crypto.Hash.SHA256.new()
+ >>> h2 = Crypto.Hash.SHA256.new()
+ >>> getFileDigest(cStringIO.StringIO(s), h1)
+ >>> h2.update(s)
+ >>> h1.digest() == h2.digest()
+ True
+ """
+ useTempDigestObj = (digestObj == None)
+ if useTempDigestObj:
+ digestObj = Crypto.Hash.SHA256.new()
+
+ while 1:
+ s = f.read(4096)
+ if not s:
+ break
+ digestObj.update(s)
+
+ if useTempDigestObj:
+ return digestObj.digest()
+
+def makeSignable(obj):
+ return { 'signed' : obj, 'signatures' : [] }
+
+def sign(signed, key):
+ """Add an element to the signatures of 'signed', containing a new signature
+ of the "signed" part.
+ """
+
+ SIGNED_SCHEMA.checkMatch(signed)
+
+ signable = signed["signed"]
+ signatures = signed['signatures']
+
+ keyid = key.getKeyID()
+
+ signatures = [ s for s in signatures if s['keyid'] != keyid ]
+
+ method, sig = key.sign(signable)
+ signatures.append({ 'keyid' : keyid,
+ 'method' : method,
+ 'sig' : sig })
+ signed['signatures'] = signatures
+
+def formatTime(t):
+ """Encode the time 't' in YYYY-MM-DD HH:MM:SS format.
+
+ >>> formatTime(1221265172)
+ '2008-09-13 00:19:32'
+ """
+ return time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(t))
+
+def parseTime(s):
+ """Parse a time 's' in YYYY-MM-DD HH:MM:SS format."""
+ try:
+ return calendar.timegm(time.strptime(s, "%Y-%m-%d %H:%M:%S"))
+ except ValueError:
+ raise thandy.FormatError("Malformed time %r", s)
+
+def formatBase64(h):
+ """Return the base64 encoding of h with whitespace and = signs omitted."""
+ return binascii.b2a_base64(h).rstrip("=\n ")
+
+formatHash = formatBase64
+
+def parseBase64(s):
+ """Parse a base64 encoding with whitespace and = signs omitted. """
+ extra = len(s) % 4
+ if extra:
+ padding = "=" * (4 - extra)
+ s += padding
+ try:
+ return binascii.a2b_base64(s)
+ except binascii.Error:
+ raise thandy.FormatError("Invalid base64 encoding")
+
+def parseHash(s):
+ h = parseBase64(s)
+ if len(h) != Crypto.Hash.SHA256.digest_size:
+ raise thandy.FormatError("Bad hash length")
+ return h
+
+S = thandy.checkJson
+
+# A date, in YYYY-MM-DD HH:MM:SS format.
+TIME_SCHEMA = S.RE(r'\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}')
+# A hash, base64-encoded
+HASH_SCHEMA = S.RE(r'[a-zA-Z0-9\+\/]{43}')
+
+# A hexadecimal value.
+HEX_SCHEMA = S.RE(r'[a-fA-F0-9]+')
+# A base-64 encoded value
+BASE64_SCHEMA = S.RE(r'[a-zA-Z0-9\+\/]+')
+# An RSA key; subtype of PUBKEY_SCHEMA.
+RSAKEY_SCHEMA = S.Obj(
+ _keytype=S.Str("rsa"),
+ e=BASE64_SCHEMA,
+ n=BASE64_SCHEMA)
+# Any public key.
+PUBKEY_SCHEMA = S.Obj(
+ _keytype=S.AnyStr())
+
+KEYID_SCHEMA = HASH_SCHEMA
+SIG_METHOD_SCHEMA = S.AnyStr()
+RELPATH_SCHEMA = PATH_PATTERN_SCHEMA = S.AnyStr()
+URL_SCHEMA = S.AnyStr()
+VERSION_SCHEMA = S.ListOf(S.Any()) #XXXX WRONG
+
+# A single signature of an object. Indicates the signature, the id of the
+# signing key, and the signing method.
+SIGNATURE_SCHEMA = S.Obj(
+ keyid=KEYID_SCHEMA,
+ method=SIG_METHOD_SCHEMA,
+ sig=BASE64_SCHEMA)
+
+# A signed object.
+SIGNED_SCHEMA = S.Obj(
+ signed=S.Any(),
+ signatures=S.ListOf(SIGNATURE_SCHEMA))
+
+ROLENAME_SCHEMA = S.AnyStr()
+
+# A role: indicates that a key is allowed to certify a kind of
+# document at a certain place in the repo.
+ROLE_SCHEMA = S.Struct([ROLENAME_SCHEMA, PATH_PATTERN_SCHEMA])
+
+# A Keylist: indicates a list of live keys and their roles.
+KEYLIST_SCHEMA = S.Obj(
+ _type=S.Str("Keylist"),
+ ts=TIME_SCHEMA,
+ keys=S.ListOf(S.Obj(key=PUBKEY_SCHEMA, roles=S.ListOf(ROLE_SCHEMA))))
+
+# A Mirrorlist: indicates all the live mirrors, and what documents they
+# serve.
+MIRRORLIST_SCHEMA = S.Obj(
+ _type=S.Str("Mirrorlist"),
+ ts=TIME_SCHEMA,
+ mirrors=S.ListOf(S.Obj(name=S.AnyStr(),
+ urlbase=URL_SCHEMA,
+ contents=S.ListOf(PATH_PATTERN_SCHEMA),
+ weight=S.Int(lo=0),
+ )))
+
+# A timestamp: indicates the lastest versions of all top-level signed objects.
+TIMESTAMP_SCHEMA = S.Obj(
+ _type = S.Str("Timestamp"),
+ at = TIME_SCHEMA,
+ m = S.Struct([TIME_SCHEMA, HASH_SCHEMA]),
+ k = S.Struct([TIME_SCHEMA, HASH_SCHEMA]),
+ b = S.DictOf(keySchema=S.AnyStr(),
+ valSchema=
+ S.Struct([ VERSION_SCHEMA, RELPATH_SCHEMA, TIME_SCHEMA, HASH_SCHEMA ]))
+ )
+
+# A Bundle: lists a bunch of packages that should be updated in tandem
+BUNDLE_SCHEMA = S.Obj(
+ _type=S.Str("Bundle"),
+ at=TIME_SCHEMA,
+ name=S.AnyStr(),
+ os=S.AnyStr(),
+ arch=S.Opt(S.AnyStr()),
+ version=VERSION_SCHEMA,
+ location=RELPATH_SCHEMA,
+ packages=S.ListOf(S.Obj(
+ name=S.AnyStr(),
+ version=VERSION_SCHEMA,
+ path=RELPATH_SCHEMA,
+ hash=HASH_SCHEMA,
+ order=S.Struct([S.Int(), S.Int(), S.Int()]),
+ optional=S.Opt(S.Bool()),
+ gloss=S.DictOf(S.AnyStr(), S.AnyStr()),
+ longgloss=S.DictOf(S.AnyStr(), S.AnyStr()))))
+
+PACKAGE_SCHEMA = S.Obj(
+ _type=S.Str("Package"),
+ name=S.AnyStr(),
+ location=RELPATH_SCHEMA,
+ version=VERSION_SCHEMA,
+ format=S.Obj(),
+ ts=TIME_SCHEMA,
+ files=S.ListOf(S.Struct([RELPATH_SCHEMA, HASH_SCHEMA])),
+ shortdesc=S.DictOf(S.AnyStr(), S.AnyStr()),
+ longdesc=S.DictOf(S.AnyStr(), S.AnyStr()))
+
+ALL_ROLES = ('timestamp', 'mirrors', 'bundle', 'package', 'master')
+
+class Key:
+ #XXXX UNUSED.
+ def __init__(self, key, roles=()):
+ self.key = key
+ self.roles = []
+ for r,p in roles:
+ self.addRole(r,p)
+
+ def addRole(self, role, path):
+ assert role in ALL_ROLES
+ self.roles.append((role, path))
+
+ def getRoles(self):
+ return self.roles
+
+ @staticmethod
+ def fromJSon(obj):
+ # must match PUBKEY_SCHEMA
+ keytype = obj['_keytype']
+ if keytype == 'rsa':
+ return Key(thandy.keys.RSAKey.fromJSon(obj))
+
+ if typeattr == 'rsa':
+ key = thandy.keys.RSAKey.fromSExpression(sexpr)
+ if key is not None:
+ return Key(key)
+ else:
+ return None
+
+ def format(self):
+ return self.key.format()
+
+ def getKeyID(self):
+ return self.key.getKeyID()
+
+ def sign(self, sexpr=None, digest=None):
+ return self.key.sign(sexpr, digest=digest)
+
+ def checkSignature(self, method, data, signatute):
+ ok = self.key.checkSignature(method, data, signature)
+ # XXXX CACHE HERE.
+ return ok
+
+class Keylist(KeyDB):
+ def __init__(self):
+ KeyDB.__init__(self)
+
+ def addFromKeylist(self, obj, allowMasterKeys=False):
+ for keyitem in obj['keys']:
+ key = keyitem['key']
+ roles = keyitem['roles']
+
+ try:
+ key = thandy.keys.RSAKey.fromJSon(key)
+ except thandy.FormatException, e:
+ print e
+ #LOG skipping key.
+ continue
+
+ for r,p in roles:
+ if r == 'master' and not allowMasterKeys:
+ #LOG
+ continue
+ if r not in ALL_ROLES:
+ continue
+ key.addRole(r,p)
+
+ self.addKey(key)
+
+class StampedInfo:
+ def __init__(self, ts, hash, version=None, relpath=None):
+ self._ts = ts
+ self._hash = hash
+ self._version = version
+ self._relpath = relpath
+
+ @staticmethod
+ def fromJSonFields(timeStr, hashStr):
+ t = parseTime(timeStr)
+ h = parseHash(hashStr)
+ return StampedInfo(t, h)
+
+ def getHash(self):
+ return self._hash
+
+ def getRelativePath(self):
+ return self._relpath
+
+class TimestampFile:
+ def __init__(self, at, mirrorlistinfo, keylistinfo, bundleinfo):
+ self._time = at
+ self._mirrorListInfo = mirrorlistinfo
+ self._keyListInfo = keylistinfo
+ self._bundleInfo = bundleinfo
+
+ @staticmethod
+ def fromJSon(obj):
+ # must be validated.
+ at = parseTime(obj['at'])
+ m = StampedInfo.fromJSonFields(*obj['m'][:2])
+ k = StampedInfo.fromJSonFields(*obj['k'][:2])
+ b = {}
+ for name, bundle in obj['b'].iteritems():
+ v = bundle[0]
+ rp = bundle[1]
+ t = parseTime(bundle[2])
+ h = parseHash(bundle[3])
+ b[name] = StampedInfo(t, h, v, rp)
+
+ return TimestampFile(at, m, k, b)
+
+ def getTime(self):
+ return self._time
+
+ def getMirrorlistInfo(self):
+ return self._mirrorListInfo
+
+ def getKeylistInfo(self):
+ return self._keyListInfo
+
+ def getBundleInfo(self, name):
+ return self._bundleInfo[name]
+
+def readConfigFile(fname, needKeys=(), optKeys=(), preload={}):
+ parsed = preload.copy()
+ result = {}
+ execfile(fname, parsed)
+
+ for k in needKeys:
+ try:
+ result[k] = parsed[k]
+ except KeyError:
+ raise thandy.FormatError("Missing value for %s in %s"%k,fname)
+
+ for k in optKeys:
+ try:
+ result[k] = parsed[k]
+ except KeyError:
+ pass
+
+ return result
+
+def makePackageObj(config_fname, package_fname):
+ preload = {}
+ shortDescs = {}
+ longDescs = {}
+ def ShortDesc(lang, val): shortDescs[lang] = val
+ def LongDesc(lang, val): longDescs[lang] = val
+ preload = { 'ShortDesc' : ShortDesc, 'LongDesc' : LongDesc }
+ r = readConfigFile(config_fname,
+ ['name',
+ 'version',
+ 'format',
+ 'location',
+ 'relpath',
+ ], (), preload)
+
+ f = open(package_fname, 'rb')
+ digest = getFileDigest(f)
+
+ # Check fields!
+ result = { '_type' : "Package",
+ 'ts' : formatTime(time.time()),
+ 'name' : r['name'],
+ 'location' : r['location'], #DOCDOC
+ 'version' : r['version'],
+ 'format' : r['format'],
+ 'files' : [ [ r['relpath'], formatHash(digest) ] ],
+ 'shortdesc' : shortDescs,
+ 'longdesc' : longDescs
+ }
+
+ PACKAGE_SCHEMA.checkMatch(result)
+
+ return result
+
+def makeBundleObj(config_fname, getPackageHash):
+ packages = []
+ def ShortGloss(lang, val): packages[-1]['gloss'][lang] = val
+ def LongGloss(lang, val): packages[-1]['longgloss'][lang] = val
+ def Package(name, version, path, order, optional=False):
+ packages.append({'name' : name,
+ 'version' : version,
+ 'path' : path,
+ 'order' : order,
+ 'optional' : optional,
+ 'gloss' : {},
+ 'longgloss' : {} })
+ preload = { 'ShortGloss' : ShortGloss, 'LongGloss' : LongGloss,
+ 'Package' : Package }
+ r = readConfigFile(config_fname,
+ ['name',
+ 'os',
+ 'version',
+ 'location',
+ ], ['arch'], preload)
+
+ result = { '_type' : "Bundle",
+ 'at' : formatTime(time.time()),
+ 'name' : r['name'],
+ 'os' : r['os'],
+ 'version' : r['version'],
+ 'location' : r['location'],
+ 'packages' : packages }
+ if r.has_key('arch'):
+ result['arch'] = r['arch']
+
+ for p in packages:
+ try:
+ p['hash'] = formatHash(getPackageHash(p['path']))
+ except KeyError:
+ raise thandy.FormatException("No such package as %s"%p['path'])
+
+ BUNDLE_SCHEMA.checkMatch(result)
+ return result
+
+def versionIsNewer(v1, v2):
+ return v1 > v2
+
+def makeTimestampObj(mirrorlist_obj, keylist_obj,
+ bundle_objs):
+ result = { '_type' : 'Timestamp',
+ 'at' : formatTime(time.time()) }
+ result['m'] = [ mirrorlist_obj['ts'],
+ formatHash(getDigest(mirrorlist_obj)) ]
+ result['k'] = [ keylist_obj['ts'],
+ formatHash(getDigest(keylist_obj)) ]
+ result['b'] = bundles = {}
+ for bundle in bundle_objs:
+ name = bundle['name']
+ v = bundle['version']
+ entry = [ v, bundle['location'], bundle['at'], formatHash(getDigest(bundle)) ]
+ if not bundles.has_key(name) or versionIsNewer(v, bundles[name][0]):
+ bundles[name] = entry
+
+ TIMESTAMP_SCHEMA.checkMatch(result)
+
+ return result
+
+class MirrorInfo:
+ def __init__(self, name, urlbase, contents, weight):
+ self._name = name
+ self._urlbase = urlbase
+ self._contents = contents
+ self._weight = weight
+
+ def canServeFile(self, fname):
+ for c in self._contents:
+ if rolePathMatches(c, fname):
+ return True
+ return False
+
+ def getFileURL(self, fname):
+ if self._urlbase[-1] == '/':
+ return self._urlbase+fname
+ else:
+ return "%s/%s" % (self._urlbase, fname)
+
+ def format(self):
+ return { 'name' : self._name,
+ 'urlbase' : self._urlbase,
+ 'contents' : self._contents,
+ 'weight' : self._weight }
+
+def makeMirrorListObj(mirror_fname):
+ mirrors = []
+ def Mirror(*a, **kw): mirrors.append(MirrorInfo(*a, **kw))
+ preload = {'Mirror' : Mirror}
+ r = readConfigFile(mirror_fname, (), (), preload)
+ result = { '_type' : "Mirrorlist",
+ 'ts' : formatTime(time.time()),
+ 'mirrors' : [ m.format() for m in mirrors ] }
+
+ MIRRORLIST_SCHEMA.checkMatch(result)
+ return result
+
+def makeKeylistObj(keylist_fname, includePrivate=False):
+ keys = []
+ def Key(obj): keys.append(obj)
+ preload = {'Key': Key}
+ r = readConfigFile(keylist_fname, (), (), preload)
+
+ klist = []
+ for k in keys:
+ k = thandy.keys.RSAKey.fromJSon(k)
+ klist.append({'key': k.format(private=includePrivate), 'roles' : k.getRoles() })
+
+ result = { '_type' : "Keylist",
+ 'ts' : formatTime(time.time()),
+ 'keys' : klist }
+
+ KEYLIST_SCHEMA.checkMatch(result)
+ return result
+
+SCHEMAS_BY_TYPE = {
+ 'Keylist' : KEYLIST_SCHEMA,
+ 'Mirrorlist' : MIRRORLIST_SCHEMA,
+ 'Timestamp' : TIMESTAMP_SCHEMA,
+ 'Bundle' : BUNDLE_SCHEMA,
+ 'Package' : PACKAGE_SCHEMA,
+ }
+
+def checkSignedObj(obj, keydb=None):
+ # Returns signaturestatus, role, path on sucess.
+
+ SIGNED_SCHEMA.checkMatch(obj)
+ try:
+ tp = obj['signed']['_type']
+ except KeyError:
+ raise thandy.FormatException("Untyped object")
+ try:
+ schema = SCHEMAS_BY_TYPE[tp]
+ except KeyError:
+ raise thandy.FormatException("Unrecognized type %r" % tp)
+ schema.checkMatch(obj['signed'])
+
+ if tp == 'Keylist':
+ role = "master"
+ path = "/meta/keys.txt"
+ elif tp == 'Mirrorlist':
+ role = "mirrors"
+ path = "/meta/mirrors.txt"
+ elif tp == "Timestamp":
+ role = 'timestamp'
+ path = "/meta/timestamp.txt"
+ elif tp == 'Bundle':
+ role = 'bundle'
+ path = obj['signed']['location']
+ elif tp == 'Package':
+ role = 'package'
+ path = obj['signed']['location']
+ else:
+ print tp
+ raise "Foo"
+
+ ss = None
+ if keydb is not None:
+ ss = checkSignatures(obj, keydb, role, path)
+
+ return ss, role, path
diff --git a/lib/thandy/keys.py b/lib/thandy/keys.py
new file mode 100644
index 0000000..5b4e072
--- /dev/null
+++ b/lib/thandy/keys.py
@@ -0,0 +1,399 @@
+
+# These require PyCrypto.
+import Crypto.PublicKey.RSA
+import Crypto.Hash.SHA256
+import Crypto.Cipher.AES
+
+import cPickle as pickle
+import binascii
+import logging
+import os
+import struct
+import sys
+import simplejson
+import getpass
+
+import thandy.formats
+import thandy.util
+
+class PublicKey:
+ def __init__(self):
+ # Confusingly, these roles are the ones used for a private key to
+ # remember what we're willing to do with it.
+ self._roles = []
+ def format(self):
+ raise NotImplemented()
+ def sign(self, data):
+ # returns a list of method,signature tuples.
+ raise NotImplemented()
+ def checkSignature(self, method, data, signature):
+ # returns True, False, or raises UnknownMethod.
+ raise NotImplemented()
+ def getKeyID(self):
+ raise NotImplemented()
+ def getRoles(self):
+ return self._roles
+ def addRole(self, role, path):
+ assert role in thandy.formats.ALL_ROLES
+ self._roles.append((role, path))
+ def clearRoles(self):
+ del self._roles[:]
+ def hasRole(self, role, path):
+ for r, p in self._roles:
+ if r == role and thandy.formats.rolePathMatches(p, path):
+ return True
+ return False
+
+if hex(1L).upper() == "0X1L":
+ def intToBinary(number):
+ """Convert an int or long into a big-endian series of bytes.
+ """
+ # This "convert-to-hex, then use binascii" approach may look silly,
+ # but it's over 10x faster than the Crypto.Util.number approach.
+ h = hex(long(number))
+ h = h[2:-1]
+ if len(h)%2:
+ h = "0"+h
+ return binascii.a2b_hex(h)
+elif hex(1L).upper() == "0X1":
+ def intToBinary(number):
+ h = hex(long(number))
+ h = h[2:]
+ if len(h)%2:
+ h = "0"+h
+ return binascii.a2b_hex(h)
+else:
+ import Crypto.Util.number
+ intToBinary = Crypto.Util.number.long_to_bytes
+ assert None
+
+def binaryToInt(binary):
+ """Convert a big-endian series of bytes into a long.
+ """
+ return long(binascii.b2a_hex(binary), 16)
+
+def intToBase64(number):
+ return thandy.formats.formatBase64(intToBinary(number))
+
+def base64ToInt(number):
+ return binaryToInt(thandy.formats.parseBase64(number))
+
+def _pkcs1_padding(m, size):
+ # I'd rather use OAEP+, but apparently PyCrypto barely supports
+ # signature verification, and doesn't seem to support signature
+ # verification with nondeterministic padding. "argh."
+
+ s = [ "\x00\x01", "\xff"* (size-3-len(m)), "\x00", m ]
+ r = "".join(s)
+ return r
+
+def _xor(a,b):
+ if a:
+ return not b
+ else:
+ return b
+
+class RSAKey(PublicKey):
+ """
+ >>> k = RSAKey.generate(bits=512)
+ >>> obj = k.format()
+ >>> obj['_keytype']
+ 'rsa'
+ >>> base64ToInt(obj['e'])
+ 65537L
+ >>> k1 = RSAKey.fromJSon(obj)
+ >>> k1.key.e == k.key.e
+ True
+ >>> k1.key.n == k.key.n
+ True
+ >>> k.getKeyID() == k1.getKeyID()
+ True
+ >>> s = { 'A B C' : "D", "E" : [ "F", "g", 99] }
+ >>> method, sig = k.sign(obj=s)
+ >>> k.checkSignature(method, sig, obj=s)
+ True
+ >>> s2 = [ s ]
+ >>> k.checkSignature(method, sig, obj=s2)
+ False
+ """
+ def __init__(self, key):
+ PublicKey.__init__(self)
+ self.key = key
+ self.keyid = None
+
+ @staticmethod
+ def generate(bits=2048):
+ key = Crypto.PublicKey.RSA.generate(bits=bits, randfunc=os.urandom)
+ return RSAKey(key)
+
+ @staticmethod
+ def fromJSon(obj):
+ # obj must match RSAKEY_SCHEMA
+
+ thandy.formats.RSAKEY_SCHEMA.checkMatch(obj)
+ n = base64ToInt(obj['n'])
+ e = base64ToInt(obj['e'])
+ if obj.has_key('d'):
+ d = base64ToInt(obj['d'])
+ p = base64ToInt(obj['p'])
+ q = base64ToInt(obj['q'])
+ u = base64ToInt(obj['u'])
+ key = Crypto.PublicKey.RSA.construct((n, e, d, p, q, u))
+ else:
+ key = Crypto.PublicKey.RSA.construct((n, e))
+
+ result = RSAKey(key)
+ if obj.has_key('roles'):
+ for r, p in obj['roles']:
+ result.addRole(r,p)
+
+ return result
+
+ def isPrivateKey(self):
+ return hasattr(self.key, 'd')
+
+ def format(self, private=False, includeRoles=False):
+ n = intToBase64(self.key.n)
+ e = intToBase64(self.key.e)
+ result = { '_keytype' : 'rsa',
+ 'e' : e,
+ 'n' : n }
+ if private:
+ result['d'] = intToBase64(self.key.d)
+ result['p'] = intToBase64(self.key.p)
+ result['q'] = intToBase64(self.key.q)
+ result['u'] = intToBase64(self.key.u)
+ if includeRoles:
+ result['roles'] = self.getRoles()
+ return result
+
+ def getKeyID(self):
+ if self.keyid == None:
+ d_obj = Crypto.Hash.SHA256.new()
+ thandy.formats.getDigest(self.format(), d_obj)
+ self.keyid = thandy.formats.formatHash(d_obj.digest())
+ return self.keyid
+
+ def _digest(self, obj, method=None):
+ if method in (None, "sha256-pkcs1"):
+ d_obj = Crypto.Hash.SHA256.new()
+ thandy.formats.getDigest(obj, d_obj)
+ digest = d_obj.digest()
+ return ("sha256-pkcs1", digest)
+
+ raise UnknownMethod(method)
+
+ def sign(self, obj=None, digest=None):
+ assert _xor(obj == None, digest == None)
+ if digest == None:
+ method, digest = self._digest(obj)
+ m = _pkcs1_padding(digest, (self.key.size()+1) // 8)
+ sig = intToBase64(self.key.sign(m, "")[0])
+ return (method, sig)
+
+ def checkSignature(self, method, sig, obj=None, digest=None):
+ assert _xor(obj == None, digest == None)
+ if method != "sha256-pkcs1":
+ raise UnknownMethod("method")
+ if digest == None:
+ method, digest = self._digest(obj, method)
+ sig = base64ToInt(sig)
+ m = _pkcs1_padding(digest, (self.key.size()+1) // 8)
+ return bool(self.key.verify(m, (sig,)))
+
+SALTLEN=16
+
+def secretToKey(salt, secret):
+ """Convert 'secret' to a 32-byte key, using a version of the algorithm
+ from RFC2440. The salt must be SALTLEN+1 bytes long, and should
+ be random, except for the last byte, which encodes how time-
+ consuming the computation should be.
+
+ (The goal is to make offline password-guessing attacks harder by
+ increasing the time required to convert a password to a key, and to
+ make precomputed password tables impossible to generate by )
+ """
+ assert len(salt) == SALTLEN+1
+
+ # The algorithm is basically, 'call the last byte of the salt the
+ # "difficulty", and all other bytes of the salt S. Now make
+ # an infinite stream of S|secret|S|secret|..., and hash the
+ # first N bytes of that, where N is determined by the difficulty.
+ #
+ # Obviously, this wants a hash algorithm that's tricky to
+ # parallelize.
+ #
+ # Unlike RFC2440, we use a 16-byte salt. Because CPU times
+ # have improved, we start at 16 times the previous minimum.
+
+ difficulty = ord(salt[-1])
+ count = (16L+(difficulty & 15)) << ((difficulty >> 4) + 10)
+
+ # Make 'data' nice and long, so that we don't need to call update()
+ # a zillion times.
+ data = salt[:-1]+secret
+ if len(data)<1024:
+ data *= (1024 // len(data))+1
+
+ d = Crypto.Hash.SHA256.new()
+ iters, leftover = divmod(count, len(data))
+ for _ in xrange(iters):
+ d.update(data)
+ #count -= len(data)
+ if leftover:
+ d.update(data[:leftover])
+ #count -= leftover
+ #assert count == 0
+
+ return d.digest()
+
+def encryptSecret(secret, password, difficulty=0x80):
+ """Encrypt the secret 'secret' using the password 'password',
+ and return the encrypted result."""
+ # The encrypted format is:
+ # "GKEY1" -- 5 octets, fixed, denotes data format.
+ # SALT -- 17 bytes, used to hash password
+ # IV -- 16 bytes; salt for encryption
+ # ENCRYPTED IN AES256-OFB, using a key=s2k(password, salt) and IV=IV:
+ # SLEN -- 4 bytes; length of secret, big-endian.
+ # SECRET -- len(secret) bytes
+ # D -- 32 bytes; SHA256 hash of (salt|secret|salt).
+ #
+ # This format leaks the secret length, obviously.
+ assert 0 <= difficulty < 256
+ salt = os.urandom(SALTLEN)+chr(difficulty)
+ key = secretToKey(salt, password)
+
+ d_obj = Crypto.Hash.SHA256.new()
+ d_obj.update(salt)
+ d_obj.update(secret)
+ d_obj.update(salt)
+ d = d_obj.digest()
+
+ iv = os.urandom(16)
+ e = Crypto.Cipher.AES.new(key, Crypto.Cipher.AES.MODE_OFB, iv)
+
+ # Stupidly, pycrypto doesn't accept that stream ciphers don't need to
+ # take their input in blocks. So pad it, then ignore the padded output.
+
+ padlen = 16-((len(secret)+len(d)+4) % 16)
+ if padlen == 16: padlen = 0
+ pad = '\x00' * padlen
+
+ slen = struct.pack("!L",len(secret))
+ encrypted = e.encrypt("%s%s%s%s" % (slen, secret, d, pad))
+ if padlen:
+ encrypted = encrypted[:-padlen]
+ return "GKEY1%s%s%s"%(salt, iv, encrypted)
+
+def decryptSecret(encrypted, password):
+ """Decrypt a value encrypted with encryptSecret. Raises UnknownFormat
+ or FormatError if 'encrypted' was not generated with encryptSecret.
+ Raises BadPassword if the password was not correct.
+ """
+ if encrypted[:5] != "GKEY1":
+ raise thandy.UnknownFormat()
+ encrypted = encrypted[5:]
+ if len(encrypted) < SALTLEN+1+16:
+ raise thandy.FormatException()
+
+ salt = encrypted[:SALTLEN+1]
+ iv = encrypted[SALTLEN+1:SALTLEN+1+16]
+ encrypted = encrypted[SALTLEN+1+16:]
+
+ key = secretToKey(salt, password)
+
+ e = Crypto.Cipher.AES.new(key, Crypto.Cipher.AES.MODE_OFB, iv)
+ padlen = 16-(len(encrypted) % 16)
+ if padlen == 16: padlen = 0
+ pad = '\x00' * padlen
+
+ decrypted = e.decrypt("%s%s"%(encrypted,pad))
+ slen = struct.unpack("!L", decrypted[:4])[0]
+ secret = decrypted[4:4+slen]
+ hash = decrypted[4+slen:4+slen+Crypto.Hash.SHA256.digest_size]
+
+ d = Crypto.Hash.SHA256.new()
+ d.update(salt)
+ d.update(secret)
+ d.update(salt)
+
+ if d.digest() != hash:
+ raise thandy.BadPassword()
+
+ return secret
+
+class KeyStore(thandy.formats.KeyDB):
+ def __init__(self, fname, encrypted=True):
+ thandy.formats.KeyDB.__init__(self)
+
+ self._loaded = None
+ self._fname = fname
+ self._passwd = None
+ self._encrypted = encrypted
+
+ def getpass(self, reprompt=False):
+ if self._passwd != None:
+ return self._passwd
+ while 1:
+ pwd = getpass.getpass("Password: ", sys.stderr)
+ if not reprompt:
+ return pwd
+
+ pwd2 = getpass.getpass("Confirm: ", sys.stderr)
+ if pwd == pwd2:
+ return pwd
+ else:
+ print "Mismatch; try again."
+
+ def load(self, password=None):
+ logging.info("Loading private keys from %r...", self._fname)
+ if not os.path.exists(self._fname):
+ logging.info("...no such file.")
+ self._loaded = True
+ return
+
+ if password is None and self._encrypted:
+ password = self.getpass()
+
+ contents = open(self._fname, 'rb').read()
+ if self._encrypted:
+ contents = decryptSecret(contents, password)
+
+ listOfKeys = simplejson.loads(contents)
+ self._passwd = password # It worked.
+ if not listOfKeys.has_key('keys'):
+ listOfKeys['keys'] = []
+ for obj in listOfKeys['keys']:
+ key = RSAKey.fromJSon(obj)
+ self.addKey(key)
+ logging.info("Loaded key %s", key.getKeyID())
+
+ self._loaded = True
+
+ def setPassword(self, passwd):
+ self._passwd = passwd
+
+ def clearPassword(self):
+ self._passwd = None
+
+ def save(self, password=None):
+ if not self._loaded and self._encrypted:
+ self.load(password)
+
+ if password is None:
+ password = self.getpass(True)
+
+ logging.info("Saving private keys into %r...", self._fname)
+ listOfKeys = { 'keys' :
+ [ key.format(private=True, includeRoles=True) for key in
+ self._keys.values() ]
+ }
+ contents = simplejson.dumps(listOfKeys)
+ if self._encrypted:
+ contents = encryptSecret(contents, password)
+ thandy.util.replaceFile(self._fname, contents)
+ self._passwd = password # It worked.
+ logging.info("Done.")
+
+
diff --git a/lib/thandy/master_keys.py b/lib/thandy/master_keys.py
new file mode 100644
index 0000000..0d455d1
--- /dev/null
+++ b/lib/thandy/master_keys.py
@@ -0,0 +1,5 @@
+
+
+MASTER_KEYS = [
+
+]
diff --git a/lib/thandy/repository.py b/lib/thandy/repository.py
new file mode 100644
index 0000000..dc13f1b
--- /dev/null
+++ b/lib/thandy/repository.py
@@ -0,0 +1,313 @@
+
+import thandy.formats
+import thandy.util
+
+import simplejson
+import logging
+import os
+import threading
+import time
+
+MAX_TIMESTAMP_AGE = 24*60*60
+
+class RepositoryFile:
+ def __init__(self, repository, relativePath, schema,
+ needRole=None, signedFormat=True, needSigs=1):
+ self._repository = repository
+ self._relativePath = relativePath
+ self._schema = schema
+ self._needRole = needRole
+ self._signedFormat = signedFormat
+ self._needSigs = needSigs
+
+ self._signed_obj = self._main_obj = None
+ self._sigStatus = None
+ self._mtime = None
+
+ def getRelativePath(self):
+ return self._relativePath
+
+ def getPath(self):
+ return self._repository.getFilename(self._relativePath)
+
+ def _load(self):
+ fname = self.getPath()
+
+ # Propagate OSError
+ f = None
+ fd = os.open(fname, os.O_RDONLY)
+ try:
+ f = os.fdopen(fd, 'r')
+ except:
+ os.close(fd)
+ raise
+ try:
+ mtime = os.fstat(fd).st_mtime
+ content = f.read()
+ finally:
+ f.close()
+
+ signed_obj,main_obj = self._checkContent(content)
+
+ self._signed_obj = signed_obj
+ self._main_obj = main_obj
+ self._mtime = mtime
+
+ def _save(self, content=None):
+ if content == None:
+ content = sexpr.encode
+
+ signed_obj,main_obj = self._checkContent(content)
+
+ fname = self.getPath()
+ thandy.util.replaceFile(fname, contents)
+
+ self._signed_obj = signed_obj
+ self._main_obj = main_obj
+ self._mtime = mtime
+
+ def _checkContent(self, content):
+
+ try:
+ obj = simplejson.loads(content)
+ except ValueError, e:
+ raise thandy.FormatException("Couldn't decode content: %s"%e)
+
+ if self._signedFormat:
+ # This is supposed to be signed.
+ thandy.formats.SIGNED_SCHEMA.checkMatch(obj)
+
+ main_obj = obj['signed']
+ signed_obj = obj
+ else:
+ signed_obj = None
+ main_obj = obj
+
+ if self._schema != None:
+ self._schema.checkMatch(main_obj)
+
+ return signed_obj, main_obj
+
+ def load(self):
+ if self._main_obj == None:
+ self._load()
+
+ def get(self):
+ return self._main_obj
+
+ def isLoaded(self):
+ return self._main_obj != None
+
+ def getContent(self):
+ self.load()
+ return self._main_obj
+
+ def _checkSignatures(self):
+ self.load()
+ sigStatus = thandy.formats.checkSignatures(self._signed_obj,
+ self._repository._keyDB,
+ self._needRole, self._relativePath)
+ self._sigStatus = sigStatus
+
+ def checkSignatures(self):
+ if self._sigStatus is None:
+ self._checkSignatures()
+ return self._sigStatus
+
class LocalRepository:
    """Client-side view of a Thandy repository mirrored under 'root'.

    Wraps the three standard metadata files (keylist, timestamp,
    mirrorlist) and hands out cached RepositoryFile objects for
    packages and bundles on demand.
    """
    def __init__(self, root):
        self._root = root
        # Trust root: built-in master keys plus any user preloads.
        self._keyDB = thandy.util.getKeylist(None)

        self._keylistFile = RepositoryFile(
            self, "/meta/keys.txt", thandy.formats.KEYLIST_SCHEMA,
            needRole="master")
        self._timestampFile = RepositoryFile(
            self, "/meta/timestamp.txt", thandy.formats.TIMESTAMP_SCHEMA,
            needRole="timestamp")
        self._mirrorlistFile = RepositoryFile(
            self, "/meta/mirrors.txt", thandy.formats.MIRRORLIST_SCHEMA,
            needRole="mirrors")
        self._metaFiles = [ self._keylistFile,
                            self._timestampFile,
                            self._mirrorlistFile ]

        # Caches of RepositoryFile objects, keyed by relative path.
        self._packageFiles = {}
        self._bundleFiles = {}

    def getFilename(self, relativePath):
        """Map a repository-relative path to a path in the local cache."""
        if relativePath.startswith("/"):
            relativePath = relativePath[1:]
        return os.path.join(self._root, relativePath)

    def getKeylistFile(self):
        """Return the RepositoryFile for /meta/keys.txt."""
        return self._keylistFile

    def getTimestampFile(self):
        """Return the RepositoryFile for /meta/timestamp.txt."""
        return self._timestampFile

    def getMirrorlistFile(self):
        """Return the RepositoryFile for /meta/mirrors.txt."""
        return self._mirrorlistFile

    def getPackageFile(self, relPath):
        """Return (creating and caching on first use) the package file
        at 'relPath'."""
        try:
            return self._packageFiles[relPath]
        except KeyError:
            self._packageFiles[relPath] = pkg = RepositoryFile(
                self, relPath, thandy.formats.PACKAGE_SCHEMA,
                needRole='package')
            return pkg

    def getBundleFile(self, relPath):
        """Return (creating and caching on first use) the bundle file
        at 'relPath'."""
        try:
            return self._bundleFiles[relPath]
        except KeyError:
            self._bundleFiles[relPath] = pkg = RepositoryFile(
                self, relPath, thandy.formats.BUNDLE_SCHEMA,
                needRole='bundle')
            return pkg

    def getFilesToUpdate(self, now=None, trackingBundles=()):
        """Return the set of relative paths that must be (re)fetched.

        Works outward from the trust root: metadata first (returning
        early whenever a prerequisite is missing or unverified, since
        nothing downstream can be checked without it), then bundles in
        'trackingBundles', then their packages, then the packages'
        underlying files.  'now' defaults to the current time and is
        used to judge timestamp freshness.
        """
        if now == None:
            now = time.time()

        need = set()

        # Fetch missing metafiles.
        for f in self._metaFiles:
            try:
                f.load()
            except OSError, e:
                # NOTE(review): stray debug print; duplicates the log
                # line below and should probably be removed.
                print "need", f.getPath()
                logging.info("Couldn't load %s: %s. Must fetch it.",
                             f.getPath(), e)
                need.add(f.getRelativePath())

        # If the timestamp file is out of date, we need to fetch it no
        # matter what.  (Even if it isn't signed, it can't possibly
        # be good.)
        ts = self._timestampFile.get()
        if ts:
            # Read the age from the raw dict, then replace 'ts' with the
            # parsed TimestampFile object used below.
            age = now - thandy.formats.parseTime(ts['at'])
            ts = thandy.formats.TimestampFile.fromJSon(ts)
            if age > MAX_TIMESTAMP_AGE:
                need.add(self._timestampFile.getRelativePath())

        # If the keylist isn't signed right, we can't check the
        # signatures on anything else.
        if self._keylistFile.get():
            s = self._keylistFile.checkSignatures()
            if not s.isValid(): # For now only require one master key.
                need.add(self._keylistFile.getRelativePath())

        if need:
            return need

        # Import the keys from the keylist.
        self._keyDB.addFromKeylist(self._keylistFile.get())

        # If the timestamp isn't signed right, get a new timestamp and a
        # new keylist.
        s = self._timestampFile.checkSignatures()
        if not s.isValid():
            need.add(self._keylistFile.getRelativePath())
            need.add(self._timestampFile.getRelativePath())
            return need

        # FINALLY, we know we have an up-to-date, signed timestamp
        # file. Check whether the keys and mirrors file are as
        # authenticated.
        h_kf = thandy.formats.getDigest(self._keylistFile.get())
        h_expected = ts.getKeylistInfo().getHash()
        if h_kf != h_expected:
            need.add(self._keylistFile.getRelativePath())

        if need:
            return need

        s = self._mirrorlistFile.checkSignatures()
        if not s.isValid():
            need.add(self._mirrorlistFile.getRelativePath())

        h_mf = thandy.formats.getDigest(self._mirrorlistFile.get())
        h_expected = ts.getMirrorlistInfo().getHash()
        if h_mf != h_expected:
            need.add(self._mirrorlistFile.getRelativePath())

        if need:
            return need

        # Okay; that's it for the metadata.  Do we have the right
        # bundles?
        bundles = {}
        for b in trackingBundles:
            try:
                binfo = ts.getBundleInfo(b)
            except KeyError:
                logging.warn("Unrecognized bundle %s"%b)
                continue

            rp = binfo.getRelativePath()
            bfile = self.getBundleFile(rp)
            try:
                bfile.load()
            except OSError:
                need.add(rp)
                continue

            h_b = thandy.formats.getDigest(bfile.get())
            h_expected = binfo.getHash()
            if h_b != h_expected:
                need.add(rp)
                continue

            s = bfile.checkSignatures()
            if not s.isValid():
                # Can't actually use it.
                # NOTE(review): a badly signed bundle is skipped but NOT
                # added to 'need', so it won't be re-fetched -- confirm
                # this is intentional.
                continue

            bundles[rp] = bfile

        # Okay.  So we have some bundles.  See if we have their packages.
        packages = {}
        for bfile in bundles.values():
            bundle = bfile.get()
            for pkginfo in bundle['packages']:
                rp = pkginfo['path']
                pfile = self.getPackageFile(rp)
                try:
                    pfile.load()
                except OSError:
                    need.add(rp)
                    continue

                h_p = thandy.formats.getDigest(pfile.get())
                h_expected = thandy.formats.parseHash(pkginfo['hash'])
                if h_p != h_expected:
                    need.add(rp)
                    continue

                s = pfile.checkSignatures()
                if not s.isValid():
                    # Can't use it.
                    continue
                packages[rp] = pfile

        # Finally, we have some packages.  Do we have their underlying
        # files?
        for pfile in packages.values():
            package = pfile.get()
            for f in package['files']:
                # Each entry is [relativePath, hash, ...].
                rp, h = f[:2]
                h_expected = thandy.formats.parseHash(h)
                fn = self.getFilename(rp)
                try:
                    h_got = thandy.formats.getFileDigest(fn)
                except OSError:
                    need.add(rp)
                    continue
                if h_got != h_expected:
                    need.add(rp)

        # Okay; these are the files we need.
        return need
diff --git a/lib/thandy/tests.py b/lib/thandy/tests.py
new file mode 100644
index 0000000..8b967a5
--- /dev/null
+++ b/lib/thandy/tests.py
@@ -0,0 +1,64 @@
+
+import unittest
+import doctest
+import os
+import tempfile
+
+import thandy.keys
+import thandy.formats
+import thandy.repository
+import thandy.checkJson
+
+import thandy.tests
+
class CanonicalEncodingTest(unittest.TestCase):
    """Tests for thandy.formats.encodeCanonical string escaping."""
    def test_encode(self):
        # Only backslash and double-quote are escaped; other control
        # characters (tab, newline, CR) pass through verbatim.
        enc = thandy.formats.encodeCanonical
        self.assertEquals(enc(''), '""')
        self.assertEquals(enc('"'), '"\\""')
        self.assertEquals(enc('\t\\\n"\r'),
                          '"\t\\\\\n\\"\r"')
+
class CryptoTests(unittest.TestCase):
    """Tests for thandy.keys secret encryption and KeyStore persistence."""
    def test_encrypt(self):
        # Round-trip a secret through encryptSecret/decryptSecret; a
        # wrong password and malformed ciphertext must raise the
        # documented exceptions.
        s = "The Secret words are marzipan habidashery zeugma."
        password = "the password is swordfish."
        encrypted = thandy.keys.encryptSecret(s, password)
        self.assertNotEquals(encrypted, s)
        self.assert_(encrypted.startswith("GKEY1"))
        self.assertEquals(s, thandy.keys.decryptSecret(encrypted, password))
        self.assertRaises(thandy.BadPassword, thandy.keys.decryptSecret,
                          encrypted, "password")
        self.assertRaises(thandy.UnknownFormat, thandy.keys.decryptSecret,
                          "foobar", password)

    def test_keystore(self):
        passwd = "umfitty noonah"
        # tempfile.mktemp() is race-prone (another process can claim the
        # name).  Use a private temp directory instead; like mktemp, the
        # keystore file itself does not exist yet, which KeyStore expects.
        dirname = tempfile.mkdtemp()
        fname = os.path.join(dirname, "keystore")
        try:
            ks = thandy.keys.KeyStore(fname)
            key1 = thandy.keys.RSAKey.generate(512)
            key2 = thandy.keys.RSAKey.generate(512)
            ks.addKey(key1)
            ks.addKey(key2)
            ks.save(passwd)

            # Reload from disk and make sure the key material survived.
            ks2 = thandy.keys.KeyStore(fname)
            ks2.load(passwd)
            self.assertEquals(key1.key.n, ks2.getKey(key1.getKeyID()).key.n)
        finally:
            if os.path.exists(fname):
                os.unlink(fname)
            os.rmdir(dirname)
+
def suite():
    """Build the full test suite: module doctests plus the TestCase
    classes defined above."""
    suite = unittest.TestSuite()

    suite.addTest(doctest.DocTestSuite(thandy.formats))
    suite.addTest(doctest.DocTestSuite(thandy.keys))
    suite.addTest(doctest.DocTestSuite(thandy.checkJson))

    loader = unittest.TestLoader()
    # thandy.tests is this very module; this picks up CanonicalEncodingTest
    # and CryptoTests.
    suite.addTest(loader.loadTestsFromModule(thandy.tests))

    return suite
+
+
if __name__ == '__main__':

    # Run the whole suite when this module is invoked directly.
    unittest.TextTestRunner(verbosity=1).run(suite())
diff --git a/lib/thandy/util.py b/lib/thandy/util.py
new file mode 100644
index 0000000..e87ed8b
--- /dev/null
+++ b/lib/thandy/util.py
@@ -0,0 +1,73 @@
+
+import os
+import sys
+import tempfile
+
+import simplejson
+
+import thandy.formats
+import thandy.keys
+import thandy.master_keys
+
def moveFile(fromLocation, toLocation):
    """Move fromLocation onto toLocation, clobbering any existing file.

    POSIX rename() replaces the destination atomically; win32/cygwin
    rename() refuses to overwrite, so the destination is removed first
    on those platforms.
    """
    needs_unlink = sys.platform in ('cygwin', 'win32')
    if needs_unlink:
        # Best-effort delete: a missing destination is fine.
        try:
            os.unlink(toLocation)
        except OSError:
            pass
    os.rename(fromLocation, toLocation)
+
+
def replaceFile(fname, contents, textMode=False):
    """Atomically overwrite the file 'fname' with 'contents'.

    The data is written to a temporary file in the same directory and
    then renamed into place via moveFile, so readers never observe a
    partially written file.
    """
    targetDir, targetName = os.path.split(fname)
    fd, tmpName = tempfile.mkstemp(prefix=targetName, dir=targetDir,
                                   text=textMode)
    try:
        os.write(fd, contents)
    finally:
        os.close(fd)
    moveFile(tmpName, fname)
+
def userFilename(name):
    """Return the path of 'name' inside the per-user Thandy directory.

    Uses $THANDY_HOME when set, else ~/.thandy, creating the directory
    (mode 0700) if it does not exist yet.
    """
    try:
        base = os.environ["THANDY_HOME"]
    except KeyError:
        base = "~/.thandy"
    base = os.path.expanduser(base)
    # NOTE(review): exists/makedirs is a TOCTOU race if two processes
    # start at once; wrapping makedirs in try/except OSError would be
    # more robust.
    if not os.path.exists(base):
        os.makedirs(base, 0700)
    return os.path.join(base, name)
+
def getKeylist(keys_fname, checkKeys=True):
    """Build and return a thandy.formats.Keylist of trusted keys.

    Trust is assembled in three layers: the built-in MASTER_KEYS, any
    keys in the user's "preload_keys" file (trusted like master keys),
    and finally -- if 'keys_fname' is given and exists -- a signed
    keylist file.  That file must carry the 'master' role and, unless
    checkKeys is false, be signed by enough master keys.

    Raises thandy.FormatException if keys_fname is not a valid,
    adequately signed keylist.
    """
    # (A redundant function-level "import thandy.master_keys" was removed;
    # the module is already imported at the top of this file.)
    keydb = thandy.formats.Keylist()

    for key in thandy.master_keys.MASTER_KEYS:
        keydb.addKey(key)

    user_keys = userFilename("preload_keys")
    if os.path.exists(user_keys):
        #XXXX somewhat roundabout.
        keylist = thandy.formats.makeKeylistObj(user_keys)
        keydb.addFromKeylist(keylist, allowMasterKeys=True)

    if keys_fname and os.path.exists(keys_fname):
        f = open(keys_fname, 'r')
        try:
            obj = simplejson.load(f)
        finally:
            f.close()
        ss, role, path = thandy.formats.checkSignedObj(obj, keydb)
        if role != 'master':
            raise thandy.FormatException("%s wasn't a keylist."%keys_fname)
        if checkKeys and not ss.isValid():
            raise thandy.FormatException("%s not signed by enough master keys"%
                                         keys_fname)
        keydb.addFromKeylist(obj['signed'], allowMasterKeys=False)

    return keydb