diff --git a/daklib/utils.py b/daklib/utils.py
index 7ff93192..9bbf711a 100755
--- a/daklib/utils.py
+++ b/daklib/utils.py
@@ -1,9 +1,12 @@
 #!/usr/bin/env python
+# vim:set et ts=4 sw=4:
 
-# Utility functions
-# Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup
+"""Utility functions
 
-################################################################################
+@contact: Debian FTP Master
+@copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup
+@license: GNU General Public License version 2 or later
+"""
 
 # This program is free software; you can redistribute it and/or modify
 # it under the terms of the GNU General Public License as published by
@@ -19,49 +22,66 @@
 # along with this program; if not, write to the Free Software
 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 
-################################################################################
-
-import codecs, commands, email.Header, os, pwd, re, select, socket, shutil, \
-       sys, tempfile, traceback, stat
+import codecs
+import commands
+import email.Header
+import os
+import pwd
+import select
+import socket
+import shutil
+import sys
+import tempfile
+import traceback
+import stat
 import apt_pkg
 import database
+import time
+import tarfile
+import re
+import string
+import email as modemail
 from dak_exceptions import *
+from regexes import re_html_escaping, html_escaping, re_single_line_field, \
+                    re_multi_line_field, re_srchasver, re_verwithext, \
+                    re_parse_maintainer, re_taint_free, re_gpg_uid, re_re_mark
 
 ################################################################################
 
-re_comments = re.compile(r"\#.*")
-re_no_epoch = re.compile(r"^\d+\:")
-re_no_revision = re.compile(r"-[^-]+$")
-re_arch_from_filename = re.compile(r"/binary-[^/]+/")
-re_extract_src_version = re.compile (r"(\S+)\s*\((.*)\)")
-re_isadeb = re.compile (r"(.+?)_(.+?)_(.+)\.u?deb$")
-re_issource = re.compile (r"(.+)_(.+?)\.(orig\.tar\.gz|diff\.gz|tar\.gz|dsc)$")
+default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
+default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used
+
+alias_cache = None #: Cache for email alias checks
+key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
+
+# (hashname, function, earliest_changes_version)
+known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
+                ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
 
-re_single_line_field = re.compile(r"^(\S*)\s*:\s*(.*)")
-re_multi_line_field = re.compile(r"^\s(.*)")
-re_taint_free = re.compile(r"^[-+~/\.\w]+$")
+################################################################################
 
-re_parse_maintainer = re.compile(r"^\s*(\S.*\S)\s*\<([^\>]+)\>")
-re_gpg_uid = re.compile('^uid.*<([^>]*)>')
+def html_escape(s):
+    """ Escape html chars """
+    return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
 
-re_srchasver = re.compile(r"^(\S+)\s+\((\S+)\)$")
-re_verwithext = re.compile(r"^(\d+)(?:\.(\d+))(?:\s+\((\S+)\))?$")
+################################################################################
 
-re_srchasver = re.compile(r"^(\S+)\s+\((\S+)\)$")
+def open_file(filename, mode='r'):
+    """
+    Open C{file}, return fileobject.
 
-default_config = "/etc/dak/dak.conf" -default_apt_config = "/etc/dak/apt.conf" + @type filename: string + @param filename: path/filename to open -alias_cache = None -key_uid_email_cache = {} + @type mode: string + @param mode: open mode -# (hashname, function, earliest_changes_version) -known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)), - ("sha256", apt_pkg.sha256sum, (1, 8))] + @rtype: fileobject + @return: open fileobject -################################################################################ + @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError. -def open_file(filename, mode='r'): + """ try: f = open(filename, mode) except IOError: @@ -100,34 +120,14 @@ def extract_component_from_section(section): ################################################################################ -def parse_changes(filename, signing_rules=0): - """Parses a changes file and returns a dictionary where each field is a -key. The mandatory first argument is the filename of the .changes -file. - -signing_rules is an optional argument: - - o If signing_rules == -1, no signature is required. - o If signing_rules == 0 (the default), a signature is required. - o If signing_rules == 1, it turns on the same strict format checking - as dpkg-source. - -The rules for (signing_rules == 1)-mode are: - - o The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----" - followed by any PGP header data and must end with a blank line. - - o The data section must end with a blank line and must be followed by - "-----BEGIN PGP SIGNATURE-----". -""" - +def parse_deb822(contents, signing_rules=0): error = "" changes = {} - changes_in = open_file(filename) - lines = changes_in.readlines() + # Split the lines in the input, keeping the linebreaks. + lines = contents.splitlines(True) - if not lines: + if len(lines) == 0: raise ParseChangesError, "[Empty changes file]" # Reindex by line number so we can easily verify the format of @@ -193,7 +193,6 @@ The rules for (signing_rules == 1)-mode are: if signing_rules == 1 and inside_signature: raise InvalidDscError, index - changes_in.close() changes["filecontents"] = "".join(lines) if changes.has_key("source"): @@ -211,112 +210,234 @@ The rules for (signing_rules == 1)-mode are: ################################################################################ -def create_hash (lfiles, key, testfn, basedict = None): +def parse_changes(filename, signing_rules=0): + """ + Parses a changes file and returns a dictionary where each field is a + key. The mandatory first argument is the filename of the .changes + file. + + signing_rules is an optional argument: + + - If signing_rules == -1, no signature is required. + - If signing_rules == 0 (the default), a signature is required. + - If signing_rules == 1, it turns on the same strict format checking + as dpkg-source. + + The rules for (signing_rules == 1)-mode are: + + - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----" + followed by any PGP header data and must end with a blank line. + + - The data section must end with a blank line and must be followed by + "-----BEGIN PGP SIGNATURE-----". 
+    """
+
+    changes_in = open_file(filename)
+    content = changes_in.read()
+    changes_in.close()
+    return parse_deb822(content, signing_rules)
+
+################################################################################
+
+def hash_key(hashname):
+    return '%ssum' % hashname
+
+################################################################################
+
+def create_hash(where, files, hashname, hashfunc):
+    """
+    create_hash extends the passed files dict with the given hash by
+    iterating over all files on disk and passing them to the hashing
+    function given.
+    """
+
     rejmsg = []
-    for f in lfiles.keys():
+    for f in files.keys():
         try:
             file_handle = open_file(f)
         except CantOpenError:
             rejmsg.append("Could not open file %s for checksumming" % (f))
 
-        # Check hash
-        if basedict and basedict.has_key(f):
-            basedict[f]['%ssum' % key] = testfn(file_handle)
-        file_handle.close()
+        files[f][hash_key(hashname)] = hashfunc(file_handle)
+        file_handle.close()
     return rejmsg
 
 ################################################################################
 
-def check_hash (where, lfiles, key, testfn, basedict = None):
+def check_hash(where, files, hashname, hashfunc):
+    """
+    check_hash checks the given hash in the files dict against the actual
+    files on disk. The hash values need to be present consistently in
+    all file entries. It does not modify its input in any way.
+    """
+
     rejmsg = []
-    if basedict:
-        for f in basedict.keys():
-            if f not in lfiles:
-                rejmsg.append("%s: no %s checksum" % (f, key))
+    for f in files.keys():
+        file_handle = None
+        try:
+            try:
+                file_handle = open_file(f)
+
+                # Check for the hash entry, to not trigger a KeyError.
+                if not files[f].has_key(hash_key(hashname)):
+                    rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
+                        where))
+                    continue
+
+                # Actually check the hash for correctness.
+                if hashfunc(file_handle) != files[f][hash_key(hashname)]:
+                    rejmsg.append("%s: %s check failed in %s" % (f, hashname,
+                        where))
+            except CantOpenError:
+                # TODO: This happens when the file is in the pool.
+                # warn("Cannot open file %s" % f)
+                continue
+        finally:
+            if file_handle:
+                file_handle.close()
+    return rejmsg
+
+################################################################################
 
-    for f in lfiles.keys():
-        if basedict and f not in basedict:
-            rejmsg.append("%s: extraneous entry in %s checksums" % (f, key))
+def check_size(where, files):
+    """
+    check_size checks the file sizes in the passed files dict against the
+    files on disk.
+    """
 
+    rejmsg = []
+    for f in files.keys():
         try:
-            file_handle = open_file(f)
-        except CantOpenError:
-            continue
+            entry = os.stat(f)
+        except OSError, exc:
+            if exc.errno == 2:
+                # TODO: This happens when the file is in the pool.
+                continue
+            raise
 
-        # Check hash
-        if testfn(file_handle) != lfiles[f][key]:
-            rejmsg.append("%s: %s check failed." % (f, key))
-        file_handle.close()
-        # Store the hashes for later use
-        if basedict:
-            basedict[f]['%ssum' % key] = lfiles[f][key]
-        # Check size
-        actual_size = os.stat(f)[stat.ST_SIZE]
-        size = int(lfiles[f]["size"])
+        actual_size = entry[stat.ST_SIZE]
+        size = int(files[f]["size"])
         if size != actual_size:
             rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
                           % (f, actual_size, size, where))
+    return rejmsg
+
+################################################################################
+
+def check_hash_fields(what, manifest):
+    """
+    check_hash_fields ensures that there are no checksum fields in the
+    given dict that we do not know about.
+    """
+
+    rejmsg = []
+    hashes = map(lambda x: x[0], known_hashes)
+    for field in manifest:
+        if field.startswith("checksums-"):
+            hashname = field.split("-",1)[1]
+            if hashname not in hashes:
+                rejmsg.append("Unsupported checksum field for %s "\
+                    "in %s" % (hashname, what))
+    return rejmsg
+
+################################################################################
+
+def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
+    if format >= version:
+        # The version should contain the specified hash.
+        func = check_hash
+
+        # Import hashes from the changes
+        rejmsg = parse_checksums(".changes", files, changes, hashname)
+        if len(rejmsg) > 0:
+            return rejmsg
+    else:
+        # We need to calculate the hash because it can't possibly
+        # be in the file.
+        func = create_hash
+    return func(".changes", files, hashname, hashfunc)
+
+# We could add the orig which might be in the pool to the files dict to
+# access the checksums easily.
+
+def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
+    """
+    ensure_dsc_hashes' task is to ensure that each and every *present* hash
+    in the dsc is correct, i.e. identical to the changes file and if necessary
+    the pool. The latter task is delegated to check_hash.
+    """
+    rejmsg = []
+    if not dsc.has_key('Checksums-%s' % (hashname,)):
+        return rejmsg
+    # Import hashes from the dsc
+    parse_checksums(".dsc", dsc_files, dsc, hashname)
+    # And check it...
+    rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
     return rejmsg
 
 ################################################################################
 
 def ensure_hashes(changes, dsc, files, dsc_files):
-    # Make sure we recognise the format of the Files: field
-    format = changes.get("format", "0.0").split(".",1)
+    rejmsg = []
+
+    # Make sure we recognise the format of the Files: field in the .changes
+    format = changes.get("format", "0.0").split(".", 1)
     if len(format) == 2:
         format = int(format[0]), int(format[1])
     else:
         format = int(float(format[0])), 0
 
-    rejmsg = []
-    for x in changes:
-        if x.startswith("checksum-"):
-            h = x.split("-",1)[1]
-            if h not in dict(known_hashes):
-                rejmsg.append("Unsupported checksum field in .changes" % (h))
-
-    for x in dsc:
-        if x.startswith("checksum-"):
-            h = x.split("-",1)[1]
-            if h not in dict(known_hashes):
-                rejmsg.append("Unsupported checksum field in .dsc" % (h))
+    # We need to deal with the original changes blob, as the fields we need
+    # might not be in the changes dict serialised into the .dak anymore.
+    orig_changes = parse_deb822(changes['filecontents'])
+
+    # Copy the checksums over to the current changes dict. This will keep
+    # the existing modifications to it intact.
+    for field in orig_changes:
+        if field.startswith('checksums-'):
+            changes[field] = orig_changes[field]
+
+    # Check for unsupported hashes
+    rejmsg.extend(check_hash_fields(".changes", changes))
+    rejmsg.extend(check_hash_fields(".dsc", dsc))
 
     # We have to calculate the hash if we have an earlier changes version than
     # the hash appears in rather than require it exist in the changes file
-    # I hate backwards compatibility
-    for h,f,v in known_hashes:
-        try:
-            if format < v:
-                for m in create_hash(files, h, f, files):
-                    rejmsg.append(m)
-            else:
-                for m in check_hash(".changes %s" % (h), files, '%ssum' % h, f, files):
-                    rejmsg.append(m)
-        except NoFilesFieldError:
-            rejmsg.append("No Checksums-%s: field in .changes" % (h))
-        except UnknownFormatError, format:
-            rejmsg.append("%s: unknown format of .changes" % (format))
-        except ParseChangesError, line:
-            rejmsg.append("parse error for Checksums-%s in .changes, can't grok: %s." % (h, line))
-
-        if "source" not in changes["architecture"]: continue
+    for hashname, hashfunc, version in known_hashes:
+        rejmsg.extend(_ensure_changes_hash(changes, format, version, files,
+            hashname, hashfunc))
+        if "source" in changes["architecture"]:
+            rejmsg.extend(_ensure_dsc_hash(dsc, dsc_files, hashname,
+                hashfunc))
 
-        try:
-            if format < v:
-                for m in create_hash(dsc_files, h, f, dsc_files):
-                    rejmsg.append(m)
-            else:
-                for m in check_hash(".dsc %s" % (h), dsc_files, '%ssum' % h, f, dsc_files):
-                    rejmsg.append(m)
-        except UnknownFormatError, format:
-            rejmsg.append("%s: unknown format of .dsc" % (format))
-        except NoFilesFieldError:
-            rejmsg.append("No Checksums-%s: field in .dsc" % (h))
-        except ParseChangesError, line:
-            rejmsg.append("parse error for Checksums-%s in .dsc, can't grok: %s." % (h, line))
+    return rejmsg
 
+def parse_checksums(where, files, manifest, hashname):
+    rejmsg = []
+    field = 'checksums-%s' % hashname
+    if not field in manifest:
+        return rejmsg
+    for line in manifest[field].split('\n'):
+        if not line:
+            break
+        checksum, size, checkfile = line.strip().split(' ')
+        if not files.has_key(checkfile):
+            # TODO: check for the file's entry in the original files dict, not
+            # the one modified by (auto)byhand and other weird stuff
+            #       rejmsg.append("%s: not present in files but in checksums-%s in %s" %
+            #                (file, hashname, where))
+            continue
+        if not files[checkfile]["size"] == size:
+            rejmsg.append("%s: size differs for files and checksums-%s entry "\
+                "in %s" % (checkfile, hashname, where))
+            continue
+        files[checkfile][hash_key(hashname)] = checksum
+    for f in files.keys():
+        if not files[f].has_key(hash_key(hashname)):
+            rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
+                hashname, where))
     return rejmsg
 
 ################################################################################
 
@@ -344,7 +465,10 @@ def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
         format = format[:2]
 
     if is_a_dsc:
-        if format != (1,0):
+        # format = (1,0) are the only formats we currently accept,
+        # format = (0,0) are missing format headers of which we still
+        # have some in the archive.
+        if format != (1,0) and format != (0,0):
             raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
     else:
         if (format < (1,5) or format > (1,8)):
@@ -384,8 +508,10 @@ def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
 ################################################################################
 
 def force_to_utf8(s):
-    """Forces a string to UTF-8. If the string isn't already UTF-8,
-it's assumed to be ISO-8859-1."""
+    """
+    Forces a string to UTF-8. If the string isn't already UTF-8,
+    it's assumed to be ISO-8859-1.
+    """
     try:
         unicode(s, 'utf-8')
         return s
@@ -394,8 +520,10 @@ it's assumed to be ISO-8859-1."""
         return latin1_s.encode('utf-8')
 
 def rfc2047_encode(s):
-    """Encodes a (header) string per RFC2047 if necessary. If the
-string is neither ASCII nor UTF-8, it's assumed to be ISO-8859-1."""
+    """
+    Encodes a (header) string per RFC2047 if necessary. If the
+    string is neither ASCII nor UTF-8, it's assumed to be ISO-8859-1.
+    """
     try:
         codecs.lookup('ascii')[1](s)
         return s
@@ -416,15 +544,18 @@ string is neither ASCII nor UTF-8, it's assumed to be ISO-8859-1."""
 #  incompatible!'
 
 def fix_maintainer (maintainer):
-    """Parses a Maintainer or Changed-By field and returns:
-  (1) an RFC822 compatible version,
-  (2) an RFC2047 compatible version,
-  (3) the name
-  (4) the email
-
-The name is forced to UTF-8 for both (1) and (3). If the name field
-contains '.' or ',' (as allowed by Debian policy), (1) and (2) are
-switched to 'email (name)' format."""
+    """
+    Parses a Maintainer or Changed-By field and returns:
+      1. an RFC822 compatible version,
+      2. an RFC2047 compatible version,
+      3. the name
+      4. the email
+
+    The name is forced to UTF-8 for both 1. and 3.. If the name field
+    contains '.' or ',' (as allowed by Debian policy), 1. and 2. are
+    switched to 'email (name)' format.
+
+    """
     maintainer = maintainer.strip()
     if not maintainer:
         return ('', '', '', '')
@@ -462,15 +593,76 @@ switched to 'email (name)' format."""
 
 ################################################################################
 
-# sendmail wrapper, takes _either_ a message string or a file as arguments
 def send_mail (message, filename=""):
-    # If we've been passed a string dump it into a temporary file
+    """sendmail wrapper, takes _either_ a message string or a file as arguments"""
+
+    # If we've been passed a string dump it into a temporary file
     if message:
-        filename = tempfile.mktemp()
-        fd = os.open(filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0700)
+        (fd, filename) = tempfile.mkstemp()
         os.write (fd, message)
         os.close (fd)
 
+    if Cnf.has_key("Dinstall::MailWhiteList") and \
+       Cnf["Dinstall::MailWhiteList"] != "":
+        message_in = open_file(filename)
+        message_raw = modemail.message_from_file(message_in)
+        message_in.close();
+
+        whitelist = [];
+        whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
+        try:
+            for line in whitelist_in:
+                if re_re_mark.match(line):
+                    whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
+                else:
+                    whitelist.append(re.compile(re.escape(line.strip())))
+        finally:
+            whitelist_in.close()
+
+        # Fields to check.
+        fields = ["To", "Bcc", "Cc"]
+        for field in fields:
+            # Check each field
+            value = message_raw.get(field, None)
+            if value != None:
+                match = [];
+                for item in value.split(","):
+                    (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
+                    mail_whitelisted = 0
+                    for wr in whitelist:
+                        if wr.match(email):
+                            mail_whitelisted = 1
+                            break
+                    if not mail_whitelisted:
+                        print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])
+                        continue
+                    match.append(item)
+
+                # Doesn't have any mail in whitelist so remove the header
+                if len(match) == 0:
+                    del message_raw[field]
+                else:
+                    message_raw.replace_header(field, string.join(match, ", "))
+
+        # Change message fields in order if we don't have a To header
+        if not message_raw.has_key("To"):
+            fields.reverse()
+            for field in fields:
+                if message_raw.has_key(field):
+                    message_raw[fields[-1]] = message_raw[field]
+                    del message_raw[field]
+                    break
+            else:
+                # Clean up any temporary files
+                # and return, as we removed all recipients.
+                if message:
+                    os.unlink (filename);
+                return;
+
+        fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
+        os.write (fd, message_raw.as_string(True));
+        os.close (fd);
+
     # Invoke sendmail
     (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
     if (result != 0):
@@ -581,13 +773,13 @@ def regex_safe (s):
 
 ################################################################################
 
-# Perform a substitution of template
 def TemplateSubst(map, filename):
-    file = open_file(filename)
-    template = file.read()
+    """ Perform a substitution of template """
+    templatefile = open_file(filename)
+    template = templatefile.read()
     for x in map.keys():
         template = template.replace(x,map[x])
-    file.close()
+    templatefile.close()
     return template
 
 ################################################################################
 
@@ -628,8 +820,8 @@ def cc_fix_changes (changes):
     for j in o.split():
         changes["architecture"][j] = 1
 
-# Sort by source name, source version, 'have source', and then by filename
 def changes_compare (a, b):
+    """ Sort by source name, source version, 'have source', and then by filename """
     try:
         a_changes = parse_changes(a)
     except:
@@ -683,13 +875,13 @@ def find_next_free (dest, too_many=100):
 ################################################################################
 
 def result_join (original, sep = '\t'):
-    list = []
+    resultlist = []
    for i in xrange(len(original)):
         if original[i] == None:
-            list.append("")
+            resultlist.append("")
         else:
-            list.append(original[i])
-    return sep.join(list)
+            resultlist.append(original[i])
+    return sep.join(resultlist)
 
 ################################################################################
 
@@ -707,18 +899,20 @@ def prefix_multi_line_string(str, prefix, include_blank_lines=0):
 ################################################################################
 
 def validate_changes_file_arg(filename, require_changes=1):
-    """'filename' is either a .changes or .dak file. If 'filename' is a
-.dak file, it's changed to be the corresponding .changes file. The
-function then checks if the .changes file a) exists and b) is
-readable and returns the .changes filename if so. If there's a
-problem, the next action depends on the option 'require_changes'
-argument:
-
- o If 'require_changes' == -1, errors are ignored and the .changes
-   filename is returned.
 o If 'require_changes' == 0, a warning is given and 'None' is returned.
- o If 'require_changes' == 1, a fatal error is raised.
-""" + """ + 'filename' is either a .changes or .dak file. If 'filename' is a + .dak file, it's changed to be the corresponding .changes file. The + function then checks if the .changes file a) exists and b) is + readable and returns the .changes filename if so. If there's a + problem, the next action depends on the option 'require_changes' + argument: + + - If 'require_changes' == -1, errors are ignored and the .changes + filename is returned. + - If 'require_changes' == 0, a warning is given and 'None' is returned. + - If 'require_changes' == 1, a fatal error is raised. + + """ error = None orig_filename = filename @@ -777,8 +971,8 @@ def get_conf(): ################################################################################ -# Handle -a, -c and -s arguments; returns them as SQL constraints def parse_args(Options): + """ Handle -a, -c and -s arguments; returns them as SQL constraints """ # Process suite if Options["Suite"]: suite_ids_list = [] @@ -874,10 +1068,13 @@ def try_with_debug(function): ################################################################################ -# Function for use in sorting lists of architectures. -# Sorts normally except that 'source' dominates all others. - def arch_compare_sw (a, b): + """ + Function for use in sorting lists of architectures. + + Sorts normally except that 'source' dominates all others. + """ + if a == "source" and b == "source": return 0 elif a == "source": @@ -889,13 +1086,15 @@ def arch_compare_sw (a, b): ################################################################################ -# Split command line arguments which can be separated by either commas -# or whitespace. If dwim is set, it will complain about string ending -# in comma since this usually means someone did 'dak ls -a i386, m68k -# foo' or something and the inevitable confusion resulting from 'm68k' -# being treated as an argument is undesirable. - def split_args (s, dwim=1): + """ + Split command line arguments which can be separated by either commas + or whitespace. If dwim is set, it will complain about string ending + in comma since this usually means someone did 'dak ls -a i386, m68k + foo' or something and the inevitable confusion resulting from 'm68k' + being treated as an argument is undesirable. + """ + if s.find(",") == -1: return s.split() else: @@ -909,9 +1108,12 @@ def Dict(**dict): return dict ######################################## -# Our very own version of commands.getouputstatus(), hacked to support -# gpgv's status fd. def gpgv_get_status_output(cmd, status_read, status_write): + """ + Our very own version of commands.getouputstatus(), hacked to support + gpgv's status fd. + """ + cmd = ['/bin/sh', '-c', cmd] p2cread, p2cwrite = os.pipe() c2pread, c2pwrite = os.pipe() @@ -1001,9 +1203,11 @@ def process_gpgv_output(status): ################################################################################ def retrieve_key (filename, keyserver=None, keyring=None): - """Retrieve the key that signed 'filename' from 'keyserver' and -add it to 'keyring'. Returns nothing on success, or an error message -on error.""" + """ + Retrieve the key that signed 'filename' from 'keyserver' and + add it to 'keyring'. Returns nothing on success, or an error message + on error. 
+    """
 
     # Defaults for keyserver and keyring
     if not keyserver:
         keyserver = Cnf["Dinstall::KeyServer"]
 
@@ -1016,7 +1220,7 @@ on error."""
         return "%s: tainted filename" % (filename)
 
     # Invoke gpgv on the file
-    status_read, status_write = os.pipe();
+    status_read, status_write = os.pipe()
     cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
     (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
 
@@ -1053,18 +1257,20 @@ def gpg_keyring_args(keyrings=None):
 ################################################################################
 
 def check_signature (sig_filename, reject, data_filename="", keyrings=None, autofetch=None):
-    """Check the signature of a file and return the fingerprint if the
-signature is valid or 'None' if it's not. The first argument is the
-filename whose signature should be checked. The second argument is a
-reject function and is called when an error is found. The reject()
-function must allow for two arguments: the first is the error message,
-the second is an optional prefix string. It's possible for reject()
-to be called more than once during an invocation of check_signature().
-The third argument is optional and is the name of the files the
-detached signature applies to. The fourth argument is optional and is
-a *list* of keyrings to use. 'autofetch' can either be None, True or
-False. If None, the default behaviour specified in the config will be
-used."""
+    """
+    Check the signature of a file and return the fingerprint if the
+    signature is valid or 'None' if it's not. The first argument is the
+    filename whose signature should be checked. The second argument is a
+    reject function and is called when an error is found. The reject()
+    function must allow for two arguments: the first is the error message,
+    the second is an optional prefix string. It's possible for reject()
+    to be called more than once during an invocation of check_signature().
+    The third argument is optional and is the name of the files the
+    detached signature applies to. The fourth argument is optional and is
+    a *list* of keyrings to use. 'autofetch' can either be None, True or
+    False. If None, the default behaviour specified in the config will be
+    used.
+    """
 
     # Ensure the filename contains no shell meta-characters or other badness
     if not re_taint_free.match(sig_filename):
@@ -1088,7 +1294,7 @@ used."""
         return None
 
     # Build the command line
-    status_read, status_write = os.pipe();
+    status_read, status_write = os.pipe()
     cmd = "gpgv --status-fd %s %s %s %s" % (
         status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)
 
@@ -1128,11 +1334,25 @@ used."""
     if keywords.has_key("NODATA"):
         reject("no signature found in %s." % (sig_filename))
         bad = 1
+    if keywords.has_key("EXPKEYSIG"):
+        args = keywords["EXPKEYSIG"]
+        if len(args) >= 1:
+            key = args[0]
+            reject("Signature made by expired key 0x%s" % (key))
+            bad = 1
     if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
         args = keywords["KEYEXPIRED"]
+        expiredate=""
         if len(args) >= 1:
-            key = args[0]
-            reject("The key (0x%s) used to sign %s has expired." % (key, sig_filename))
+            timestamp = args[0]
+            if timestamp.count("T") == 0:
+                try:
+                    expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
+                except ValueError:
+                    expiredate = "unknown (%s)" % (timestamp)
+            else:
+                expiredate = timestamp
+        reject("The key used to sign %s has expired on %s" % (sig_filename, expiredate))
         bad = 1
 
     if bad:
@@ -1234,9 +1454,11 @@ def wrap(paragraph, max_length, prefix=""):
 
 ################################################################################
 
-# Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
-# Returns fixed 'src'
 def clean_symlink (src, dest, root):
+    """
+    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
+    Returns fixed 'src'
+    """
     src = src.replace(root, '', 1)
     dest = dest.replace(root, '', 1)
     dest = os.path.dirname(dest)
@@ -1245,32 +1467,22 @@ def clean_symlink (src, dest, root):
 
 ################################################################################
 
-def temp_filename(directory=None, dotprefix=None, perms=0700):
-    """Return a secure and unique filename by pre-creating it.
-If 'directory' is non-null, it will be the directory the file is pre-created in.
-If 'dotprefix' is non-null, the filename will be prefixed with a '.'."""
-
-    if directory:
-        old_tempdir = tempfile.tempdir
-        tempfile.tempdir = directory
+def temp_filename(directory=None, prefix="dak", suffix=""):
+    """
+    Return a secure and unique filename by pre-creating it.
+    If 'directory' is non-null, it will be the directory the file is pre-created in.
+    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
+    If 'suffix' is non-null, the filename will end with it.
 
-    filename = tempfile.mktemp()
+    Returns a pair (fd, name).
+    """
 
-    if dotprefix:
-        filename = "%s/.%s" % (os.path.dirname(filename), os.path.basename(filename))
-    fd = os.open(filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, perms)
-    os.close(fd)
-
-    if directory:
-        tempfile.tempdir = old_tempdir
-
-    return filename
+    return tempfile.mkstemp(suffix, prefix, directory)
 
 ################################################################################
 
-# checks if the user part of the email is listed in the alias file
-
 def is_email_alias(email):
+    """ checks if the user part of the email is listed in the alias file """
     global alias_cache
     if alias_cache == None:
         aliasfn = which_alias_file()
@@ -1292,3 +1504,53 @@ if which_conf_file() != default_config:
     apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
 
 ################################################################################
+
+def generate_contents_information(filename):
+    """
+    Generate a list of files contained in a .deb
+
+    @type filename: string
+    @param filename: the path to a data.tar.gz or data.tar.bz2
+
+    @rtype: list
+    @return: a list of files in the data.tar.* portion of the .deb
+    """
+    cmd = "ar t %s" % (filename)
+    (result, output) = commands.getstatusoutput(cmd)
+    if result != 0:
+        reject("%s: 'ar t' invocation failed." % (filename))
+        reject(utils.prefix_multi_line_string(output, " [ar output:] "), "")
+
+    # Ugh ... this is ugly ... Code ripped from process_unchecked.py
+    chunks = output.split('\n')
+
+    contents = []
+    try:
+        cmd = "ar x %s %s" % (filename, chunks[2])
+        (result, output) = commands.getstatusoutput(cmd)
+        if result != 0:
+            reject("%s: '%s' invocation failed." % (filename, cmd))
+            reject(utils.prefix_multi_line_string(output, " [ar output:] "), "")
+
+        # Got deb tarballs, now let's go through and determine what bits
+        # and pieces the deb had ...
+        if chunks[2] == "data.tar.gz":
+            data = tarfile.open("data.tar.gz", "r:gz")
+        elif chunks[2] == "data.tar.bz2":
+            data = tarfile.open("data.tar.bz2", "r:bz2")
+        else:
+            os.remove(chunks[2])
+            reject("couldn't find data.tar.*")
+
+        for tarinfo in data:
+            if not tarinfo.isdir():
+                contents.append(tarinfo.name[2:])
+
+    finally:
+        if os.path.exists( chunks[2] ):
+            shutil.rmtree( chunks[2] )
+            os.remove( chunks[2] )
+
+    return contents
+
+###############################################################################
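
Two illustrative sketches follow; neither is part of the patch itself.

First, the checksum helpers introduced above (hash_key, a create_hash-style fill pass, a check_hash-style verify pass) all revolve around a files dict mapping filenames to per-file fields. A minimal sketch of that flow, using hashlib.sha256 as a stand-in for apt_pkg.sha256sum and a made-up filename and size:

    import hashlib

    def hash_key(hashname):
        # Same helper as in the patch: "sha256" -> "sha256sum".
        return '%ssum' % hashname

    def sha256sum(fileobj):
        # Stand-in for apt_pkg.sha256sum: digest an open file object.
        return hashlib.sha256(fileobj.read()).hexdigest()

    # A files dict in the shape dak passes around (entry is hypothetical).
    files = {"example.deb": {"size": "4"}}
    open("example.deb", "wb").write(b"test")

    # create_hash-style pass: extend each entry with the computed digest.
    for f in files.keys():
        fh = open(f, "rb")
        files[f][hash_key("sha256")] = sha256sum(fh)
        fh.close()

    # check_hash-style pass: recompute, compare, and collect reject messages.
    rejmsg = []
    for f in files.keys():
        fh = open(f, "rb")
        if sha256sum(fh) != files[f][hash_key("sha256")]:
            rejmsg.append("%s: sha256 check failed" % (f))
        fh.close()
    print(rejmsg)   # [] when stored and recomputed digests agree

Second, the rewritten temp_filename returns mkstemp's (fd, name) pair instead of a bare path, so callers now own both the open descriptor and the unlink. A hedged usage sketch (the mkstemp call mirrors the patched signature; the write/cleanup pattern around it is illustrative, not lifted from dak):

    import os
    import tempfile

    # temp_filename(directory=None, prefix="dak", suffix="") boils down to:
    (fd, name) = tempfile.mkstemp("", "dak", None)
    try:
        os.write(fd, b"Format: 1.8\n")
    finally:
        os.close(fd)
        os.unlink(name)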