X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=daklib%2Futils.py;h=a30107be57632d18b3f604cd8a4094395fd19de8;hb=2a10967af5974e4fa513d029fb995e122d90501a;hp=fd4d7bbf8ed2fa6478836c229d8f138b874ec6f7;hpb=e51c37d2560d411945779e035115fc3d11dedb8e;p=dak.git diff --git a/daklib/utils.py b/daklib/utils.py old mode 100755 new mode 100644 index fd4d7bbf..a30107be --- a/daklib/utils.py +++ b/daklib/utils.py @@ -23,9 +23,12 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA import commands +import codecs +import datetime import email.Header import os import pwd +import grp import select import socket import shutil @@ -33,20 +36,30 @@ import sys import tempfile import traceback import stat +import apt_inst import apt_pkg import time import re import email as modemail import subprocess - -from dbconn import DBConn, get_architecture, get_component, get_suite, get_override_type, Keyring, session_wrapper +import ldap +import errno + +import daklib.config as config +import daklib.daksubprocess +from dbconn import DBConn, get_architecture, get_component, get_suite, \ + get_override_type, Keyring, session_wrapper, \ + get_active_keyring_paths, get_primary_keyring_path, \ + get_suite_architectures, get_or_set_metadatakey, DBSource, \ + Component, Override, OverrideType +from sqlalchemy import desc from dak_exceptions import * from gpg import SignedFile from textutils import fix_maintainer from regexes import re_html_escaping, html_escaping, re_single_line_field, \ re_multi_line_field, re_srchasver, re_taint_free, \ - re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource, \ - re_is_orig_source + re_re_mark, re_whitespace_comment, re_issource, \ + re_is_orig_source, re_build_dep_arch, re_parse_maintainer from formats import parse_format, validate_changes_format from srcformats import get_format_from_string @@ -55,7 +68,6 @@ from collections import defaultdict ################################################################################ default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties -default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used alias_cache = None #: Cache for email alias checks key_uid_email_cache = {} #: Cache for email addresses from gpg key uids @@ -68,7 +80,7 @@ known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)), # code in lenny's Python. This also affects commands.getoutput and # commands.getstatus. 
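# (daklib.daksubprocess, imported above, is not shown in this diff; it is
# presumably a thin subprocess wrapper that restores the default SIGPIPE
# handler in the child. A minimal sketch of the assumed interface, for
# illustration only, not the actual module:
#
#     import signal
#     import subprocess
#
#     def _restore_sigpipe():
#         signal.signal(signal.SIGPIPE, signal.SIG_DFL)
#
#     def Popen(*args, **kwargs):
#         kwargs.setdefault('preexec_fn', _restore_sigpipe)
#         return subprocess.Popen(*args, **kwargs)
# )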
def dak_getstatusoutput(cmd): - pipe = subprocess.Popen(cmd, shell=True, universal_newlines=True, + pipe = daklib.daksubprocess.Popen(cmd, shell=True, universal_newlines=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) output = pipe.stdout.read() @@ -112,7 +124,7 @@ def open_file(filename, mode='r'): try: f = open(filename, mode) except IOError: - raise CantOpenError, filename + raise CantOpenError(filename) return f ################################################################################ @@ -135,7 +147,7 @@ def our_raw_input(prompt=""): ################################################################################ -def extract_component_from_section(section): +def extract_component_from_section(section, session=None): component = "" if section.find('/') != -1: @@ -143,10 +155,7 @@ def extract_component_from_section(section): # Expand default component if component == "": - if Cnf.has_key("Component::%s" % section): - component = section - else: - component = "main" + component = "main" return (section, component) @@ -168,7 +177,7 @@ def parse_deb822(armored_contents, signing_rules=0, keyrings=None, session=None) lines = contents.splitlines(True) if len(lines) == 0: - raise ParseChangesError, "[Empty changes file]" + raise ParseChangesError("[Empty changes file]") # Reindex by line number so we can easily verify the format of # .dsc files... @@ -186,7 +195,7 @@ def parse_deb822(armored_contents, signing_rules=0, keyrings=None, session=None) line = indexed_lines[index] if line == "" and signing_rules == 1: if index != num_of_lines: - raise InvalidDscError, index + raise InvalidDscError(index) break slf = re_single_line_field.match(line) if slf: @@ -200,7 +209,7 @@ def parse_deb822(armored_contents, signing_rules=0, keyrings=None, session=None) mlf = re_multi_line_field.match(line) if mlf: if first == -1: - raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line) + raise ParseChangesError("'%s'\n [Multi-line field continuing on from nothing?]" % (line)) if first == 1 and changes[field] != "": changes[field] += '\n' first = 0 @@ -219,7 +228,7 @@ def parse_deb822(armored_contents, signing_rules=0, keyrings=None, session=None) changes["source-version"] = srcver.group(2) if error: - raise ParseChangesError, error + raise ParseChangesError(error) return changes @@ -247,13 +256,12 @@ def parse_changes(filename, signing_rules=0, dsc_file=0, keyrings=None): "-----BEGIN PGP SIGNATURE-----". """ - changes_in = open_file(filename) - content = changes_in.read() - changes_in.close() + with open_file(filename) as changes_in: + content = changes_in.read() try: unicode(content, 'utf-8') except UnicodeError: - raise ChangesUnicodeError, "Changes file not proper utf-8" + raise ChangesUnicodeError("Changes file not proper utf-8") changes = parse_deb822(content, signing_rules, keyrings=keyrings) @@ -268,7 +276,7 @@ def parse_changes(filename, signing_rules=0, dsc_file=0, keyrings=None): missingfields.append(keyword) if len(missingfields): - raise ParseChangesError, "Missing mandantory field(s) in changes file (policy 5.5): %s" % (missingfields) + raise ParseChangesError("Missing mandantory field(s) in changes file (policy 5.5): %s" % (missingfields)) return changes @@ -310,11 +318,8 @@ def check_hash(where, files, hashname, hashfunc): rejmsg = [] for f in files.keys(): - file_handle = None try: - try: - file_handle = open_file(f) - + with open_file(f) as file_handle: # Check for the hash entry, to not trigger a KeyError. 
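            # (files is assumed to map each filename to a dict of its
            # manifest fields, so hash_key("sha1") presumably yields the
            # "sha1sum" key looked up below.)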
if not files[f].has_key(hash_key(hashname)): rejmsg.append("%s: misses %s checksum in %s" % (f, hashname, @@ -325,13 +330,10 @@ def check_hash(where, files, hashname, hashfunc): if hashfunc(file_handle) != files[f][hash_key(hashname)]: rejmsg.append("%s: %s check failed in %s" % (f, hashname, where)) - except CantOpenError: - # TODO: This happens when the file is in the pool. - # warn("Cannot open file %s" % f) - continue - finally: - if file_handle: - file_handle.close() + except CantOpenError: + # TODO: This happens when the file is in the pool. + # warn("Cannot open file %s" % f) + continue return rejmsg ################################################################################ @@ -346,8 +348,8 @@ def check_size(where, files): for f in files.keys(): try: entry = os.stat(f) - except OSError, exc: - if exc.errno == 2: + except OSError as exc: + if exc.errno == errno.ENOENT: # TODO: This happens when the file is in the pool. continue raise @@ -361,7 +363,7 @@ def check_size(where, files): ################################################################################ -def check_dsc_files(dsc_filename, dsc=None, dsc_files=None): +def check_dsc_files(dsc_filename, dsc, dsc_files): """ Verify that the files listed in the Files field of the .dsc are those expected given the announced Format. @@ -380,13 +382,6 @@ def check_dsc_files(dsc_filename, dsc=None, dsc_files=None): """ rejmsg = [] - # Parse the file if needed - if dsc is None: - dsc = parse_changes(dsc_filename, signing_rules=1, dsc_file=1); - - if dsc_files is None: - dsc_files = build_file_list(dsc, is_a_dsc=1) - # Ensure .dsc lists proper set of source files according to the format # announced has = defaultdict(lambda: 0) @@ -401,7 +396,7 @@ def check_dsc_files(dsc_filename, dsc=None, dsc_files=None): (r'orig-.+\.tar\.(gz|bz2|xz)', ('more_orig_tar',)), ) - for f in dsc_files.keys(): + for f in dsc_files: m = re_issource.match(f) if not m: rejmsg.append("%s: %s in Files field not recognised as source." 
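
With the defaults removed above, check_dsc_files() no longer parses the
.dsc itself; callers are now expected to pass in the parsed control data
and file list. A minimal calling sketch using this module's own helpers
(the filename is made up):

    dsc = parse_changes("foo_1.0-1.dsc", signing_rules=1, dsc_file=1)
    dsc_files = build_file_list(dsc, is_a_dsc=1)
    for msg in check_dsc_files("foo_1.0-1.dsc", dsc, dsc_files):
        warn(msg)
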
@@ -522,8 +517,7 @@ def parse_checksums(where, files, manifest, hashname): files[checkfile][hash_key(hashname)] = checksum for f in files.keys(): if not files[f].has_key(hash_key(hashname)): - rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile, - hashname, where)) + rejmsg.append("%s: no entry in checksums-%s in %s" % (f, hashname, where)) return rejmsg ################################################################################ @@ -555,7 +549,7 @@ def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"): else: (md5, size, name) = s except ValueError: - raise ParseChangesError, i + raise ParseChangesError(i) if section == "": section = "-" @@ -572,48 +566,52 @@ def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"): ################################################################################ -# see http://bugs.debian.org/619131 -def build_package_set(dsc, session = None): - if not dsc.has_key("package-set"): +# see https://bugs.debian.org/619131 +def build_package_list(dsc, session = None): + if not dsc.has_key("package-list"): return {} packages = {} - for line in dsc["package-set"].split("\n"): + for line in dsc["package-list"].split("\n"): if not line: break - (name, section, priority) = line.split() - (section, component) = extract_component_from_section(section) - - package_type = "deb" - if name.find(":") != -1: - (package_type, name) = name.split(":", 1) - if package_type == "src": - package_type = "dsc" + fields = line.split() + name = fields[0] + package_type = fields[1] + (section, component) = extract_component_from_section(fields[2]) + priority = fields[3] # Validate type if we have a session if session and get_override_type(package_type, session) is None: # Maybe just warn and ignore? exit(1) might be a bit hard... - utils.fubar("invalid type (%s) in Package-Set." % (package_type)) - - if section == "": - section = "-" - if priority == "": - priority = "-" - - if package_type == "dsc": - priority = "source" + utils.fubar("invalid type (%s) in Package-List." % (package_type)) - if not packages.has_key(name) or packages[name]["type"] == "dsc": + if name not in packages or packages[name]["type"] == "dsc": packages[name] = dict(priority=priority, section=section, type=package_type, component=component, files=[]) return packages ################################################################################ -def send_mail (message, filename=""): - """sendmail wrapper, takes _either_ a message string or a file as arguments""" +def send_mail (message, filename="", whitelists=None): + """sendmail wrapper, takes _either_ a message string or a file as arguments + + @type whitelists: list of (str or None) + @param whitelists: path to whitelists. C{None} or an empty list whitelists + everything, otherwise an address is whitelisted if it is + included in any of the lists. + In addition a global whitelist can be specified in + Dinstall::MailWhiteList. 
+ """ + + maildir = Cnf.get('Dir::Mail') + if maildir: + path = os.path.join(maildir, datetime.datetime.now().isoformat()) + path = find_next_free(path) + with open(path, 'w') as fh: + print >>fh, message, # Check whether we're supposed to be sending mail if Cnf.has_key("Dinstall::Options::No-Mail") and Cnf["Dinstall::Options::No-Mail"]: @@ -625,23 +623,23 @@ def send_mail (message, filename=""): os.write (fd, message) os.close (fd) - if Cnf.has_key("Dinstall::MailWhiteList") and \ - Cnf["Dinstall::MailWhiteList"] != "": - message_in = open_file(filename) - message_raw = modemail.message_from_file(message_in) - message_in.close(); + if whitelists is None or None in whitelists: + whitelists = [] + if Cnf.get('Dinstall::MailWhiteList', ''): + whitelists.append(Cnf['Dinstall::MailWhiteList']) + if len(whitelists) != 0: + with open_file(filename) as message_in: + message_raw = modemail.message_from_file(message_in) whitelist = []; - whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"]) - try: + for path in whitelists: + with open_file(path, 'r') as whitelist_in: for line in whitelist_in: if not re_whitespace_comment.match(line): if re_re_mark.match(line): whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1))) else: whitelist.append(re.compile(re.escape(line.strip()))) - finally: - whitelist_in.close() # Fields to check. fields = ["To", "Bcc", "Cc"] @@ -658,7 +656,7 @@ def send_mail (message, filename=""): mail_whitelisted = 1 break if not mail_whitelisted: - print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"]) + print "Skipping {0} since it's not whitelisted".format(item) continue match.append(item) @@ -683,14 +681,14 @@ def send_mail (message, filename=""): os.unlink (filename); return; - fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700); + fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0o700); os.write (fd, message_raw.as_string(True)); os.close (fd); # Invoke sendmail (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename)) if (result != 0): - raise SendmailFailedError, output + raise SendmailFailedError(output) # Clean up any temporary files if message: @@ -698,30 +696,28 @@ def send_mail (message, filename=""): ################################################################################ -def poolify (source, component): - if component: - component += '/' +def poolify (source, component=None): if source[:3] == "lib": - return component + source[:4] + '/' + source + '/' + return source[:4] + '/' + source + '/' else: - return component + source[:1] + '/' + source + '/' + return source[:1] + '/' + source + '/' ################################################################################ -def move (src, dest, overwrite = 0, perms = 0664): +def move (src, dest, overwrite = 0, perms = 0o664): if os.path.exists(dest) and os.path.isdir(dest): dest_dir = dest else: dest_dir = os.path.dirname(dest) - if not os.path.exists(dest_dir): + if not os.path.lexists(dest_dir): umask = os.umask(00000) - os.makedirs(dest_dir, 02775) + os.makedirs(dest_dir, 0o2775) os.umask(umask) #print "Moving %s to %s..." % (src, dest) if os.path.exists(dest) and os.path.isdir(dest): dest += '/' + os.path.basename(src) # Don't overwrite unless forced to - if os.path.exists(dest): + if os.path.lexists(dest): if not overwrite: fubar("Can't move %s to %s - file already exists." 
% (src, dest)) else: @@ -731,20 +727,20 @@ def move (src, dest, overwrite = 0, perms = 0664): os.chmod(dest, perms) os.unlink(src) -def copy (src, dest, overwrite = 0, perms = 0664): +def copy (src, dest, overwrite = 0, perms = 0o664): if os.path.exists(dest) and os.path.isdir(dest): dest_dir = dest else: dest_dir = os.path.dirname(dest) if not os.path.exists(dest_dir): umask = os.umask(00000) - os.makedirs(dest_dir, 02775) + os.makedirs(dest_dir, 0o2775) os.umask(umask) #print "Copying %s to %s..." % (src, dest) if os.path.exists(dest) and os.path.isdir(dest): dest += '/' + os.path.basename(src) # Don't overwrite unless forced to - if os.path.exists(dest): + if os.path.lexists(dest): if not overwrite: raise FileExistsError else: @@ -755,25 +751,17 @@ def copy (src, dest, overwrite = 0, perms = 0664): ################################################################################ -def where_am_i (): - res = socket.getfqdn() - database_hostname = Cnf.get("Config::" + res + "::DatabaseHostname") - if database_hostname: - return database_hostname - else: - return res - def which_conf_file (): if os.getenv('DAK_CONFIG'): return os.getenv('DAK_CONFIG') res = socket.getfqdn() # In case we allow local config files per user, try if one exists - if Cnf.FindB("Config::" + res + "::AllowLocalConfig"): + if Cnf.find_b("Config::" + res + "::AllowLocalConfig"): homedir = os.getenv("HOME") confpath = os.path.join(homedir, "/etc/dak.conf") if os.path.exists(confpath): - apt_pkg.ReadConfigFileISC(Cnf,default_config) + apt_pkg.read_config_file_isc(Cnf,confpath) # We are still in here, so there is no local config file or we do # not allow local files. Do the normal stuff. @@ -782,37 +770,14 @@ def which_conf_file (): return default_config -def which_apt_conf_file (): - res = socket.getfqdn() - # In case we allow local config files per user, try if one exists - if Cnf.FindB("Config::" + res + "::AllowLocalConfig"): - homedir = os.getenv("HOME") - confpath = os.path.join(homedir, "/etc/dak.conf") - if os.path.exists(confpath): - apt_pkg.ReadConfigFileISC(Cnf,default_config) - - if Cnf.get("Config::" + res + "::AptConfig"): - return Cnf["Config::" + res + "::AptConfig"] - else: - return default_apt_config - -def which_alias_file(): - hostname = socket.getfqdn() - aliasfn = '/var/lib/misc/'+hostname+'/forward-alias' - if os.path.exists(aliasfn): - return aliasfn - else: - return None - ################################################################################ def TemplateSubst(subst_map, filename): """ Perform a substition of template """ - templatefile = open_file(filename) - template = templatefile.read() + with open_file(filename) as templatefile: + template = templatefile.read() for k, v in subst_map.iteritems(): template = template.replace(k, str(v)) - templatefile.close() return template ################################################################################ @@ -881,7 +846,7 @@ def changes_compare (a, b): # Sort by source version a_version = a_changes.get("version", "0") b_version = b_changes.get("version", "0") - q = apt_pkg.VersionCompare(a_version, b_version) + q = apt_pkg.version_compare(a_version, b_version) if q: return q @@ -901,7 +866,7 @@ def changes_compare (a, b): def find_next_free (dest, too_many=100): extra = 0 orig_dest = dest - while os.path.exists(dest) and extra < too_many: + while os.path.lexists(dest) and extra < too_many: dest = orig_dest + '.' 
+ repr(extra) extra += 1 if extra >= too_many: @@ -1018,8 +983,8 @@ def parse_args(Options): suite_ids_list = [] for suitename in split_args(Options["Suite"]): suite = get_suite(suitename, session=session) - if suite.suite_id is None: - warn("suite '%s' not recognised." % (suite.suite_name)) + if not suite or suite.suite_id is None: + warn("suite '%s' not recognised." % (suite and suite.suite_name or suitename)) else: suite_ids_list.append(suite.suite_id) if suite_ids_list: @@ -1071,43 +1036,6 @@ def parse_args(Options): ################################################################################ -# Inspired(tm) by Bryn Keller's print_exc_plus (See -# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215) - -def print_exc(): - tb = sys.exc_info()[2] - while tb.tb_next: - tb = tb.tb_next - stack = [] - frame = tb.tb_frame - while frame: - stack.append(frame) - frame = frame.f_back - stack.reverse() - traceback.print_exc() - for frame in stack: - print "\nFrame %s in %s at line %s" % (frame.f_code.co_name, - frame.f_code.co_filename, - frame.f_lineno) - for key, value in frame.f_locals.items(): - print "\t%20s = " % key, - try: - print value - except: - print "" - -################################################################################ - -def try_with_debug(function): - try: - function() - except SystemExit: - raise - except: - print_exc() - -################################################################################ - def arch_compare_sw (a, b): """ Function for use in sorting lists of architectures. @@ -1126,7 +1054,7 @@ def arch_compare_sw (a, b): ################################################################################ -def split_args (s, dwim=1): +def split_args (s, dwim=True): """ Split command line arguments which can be separated by either commas or whitespace. 
If dwim is set, it will complain about string ending @@ -1249,7 +1177,7 @@ def retrieve_key (filename, keyserver=None, keyring=None): if not keyserver: keyserver = Cnf["Dinstall::KeyServer"] if not keyring: - keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0] + keyring = get_primary_keyring_path() # Ensure the filename contains no shell meta-characters or other badness if not re_taint_free.match(filename): @@ -1286,7 +1214,7 @@ def retrieve_key (filename, keyserver=None, keyring=None): def gpg_keyring_args(keyrings=None): if not keyrings: - keyrings = Cnf.ValueList("Dinstall::GPGKeyring") + keyrings = get_active_keyring_paths() return " ".join(["--keyring %s" % x for x in keyrings]) @@ -1431,50 +1359,82 @@ def gpg_get_key_addresses(fingerprint): addresses = key_uid_email_cache.get(fingerprint) if addresses != None: return addresses - addresses = set() - cmd = "gpg --no-default-keyring %s --fingerprint %s" \ - % (gpg_keyring_args(), fingerprint) - (result, output) = commands.getstatusoutput(cmd) - if result == 0: + addresses = list() + try: + with open(os.devnull, "wb") as devnull: + output = daklib.daksubprocess.check_output( + ["gpg", "--no-default-keyring"] + gpg_keyring_args().split() + + ["--with-colons", "--list-keys", fingerprint], stderr=devnull) + except subprocess.CalledProcessError: + pass + else: for l in output.split('\n'): - m = re_gpg_uid.match(l) - if m: - addresses.add(m.group(1)) + parts = l.split(':') + if parts[0] not in ("uid", "pub"): + continue + try: + uid = parts[9] + except IndexError: + continue + try: + # Do not use unicode_escape, because it is locale-specific + uid = codecs.decode(uid, "string_escape").decode("utf-8") + except UnicodeDecodeError: + uid = uid.decode("latin1") # does not fail + m = re_parse_maintainer.match(uid) + if not m: + continue + address = m.group(2) + address = address.encode("utf8") # dak still uses bytes + if address.endswith('@debian.org'): + # prefer @debian.org addresses + # TODO: maybe not hardcode the domain + addresses.insert(0, address) + else: + addresses.append(address) key_uid_email_cache[fingerprint] = addresses return addresses ################################################################################ -# Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603 - -def wrap(paragraph, max_length, prefix=""): - line = "" - s = "" - have_started = 0 - words = paragraph.split() +def get_logins_from_ldap(fingerprint='*'): + """retrieve login from LDAP linked to a given fingerprint""" + + LDAPDn = Cnf['Import-LDAP-Fingerprints::LDAPDn'] + LDAPServer = Cnf['Import-LDAP-Fingerprints::LDAPServer'] + l = ldap.open(LDAPServer) + l.simple_bind_s('','') + Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL, + '(keyfingerprint=%s)' % fingerprint, + ['uid', 'keyfingerprint']) + login = {} + for elem in Attrs: + login[elem[1]['keyFingerPrint'][0]] = elem[1]['uid'][0] + return login - for word in words: - word_size = len(word) - if word_size > max_length: - if have_started: - s += line + '\n' + prefix - s += word + '\n' + prefix - else: - if have_started: - new_length = len(line) + word_size + 1 - if new_length > max_length: - s += line + '\n' + prefix - line = word - else: - line += ' ' + word - else: - line = word - have_started = 1 - - if have_started: - s += line +################################################################################ - return s +def get_users_from_ldap(): + """retrieve login and user names from LDAP""" + + LDAPDn = Cnf['Import-LDAP-Fingerprints::LDAPDn'] + LDAPServer = 
Cnf['Import-LDAP-Fingerprints::LDAPServer'] + l = ldap.open(LDAPServer) + l.simple_bind_s('','') + Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL, + '(uid=*)', ['uid', 'cn', 'mn', 'sn']) + users = {} + for elem in Attrs: + elem = elem[1] + name = [] + for k in ('cn', 'mn', 'sn'): + try: + if elem[k][0] != '-': + name.append(elem[k][0]) + except KeyError: + pass + users[' '.join(name)] = elem['uid'][0] + return users ################################################################################ @@ -1491,31 +1451,70 @@ def clean_symlink (src, dest, root): ################################################################################ -def temp_filename(directory=None, prefix="dak", suffix=""): +def temp_filename(directory=None, prefix="dak", suffix="", mode=None, group=None): """ Return a secure and unique filename by pre-creating it. - If 'directory' is non-null, it will be the directory the file is pre-created in. - If 'prefix' is non-null, the filename will be prefixed with it, default is dak. - If 'suffix' is non-null, the filename will end with it. - Returns a pair (fd, name). + @type directory: str + @param directory: If non-null it will be the directory the file is pre-created in. + + @type prefix: str + @param prefix: The filename will be prefixed with this string + + @type suffix: str + @param suffix: The filename will end with this string + + @type mode: str + @param mode: If set the file will get chmodded to those permissions + + @type group: str + @param group: If set the file will get chgrped to the specified group. + + @rtype: list + @return: Returns a pair (fd, name) """ - return tempfile.mkstemp(suffix, prefix, directory) + (tfd, tfname) = tempfile.mkstemp(suffix, prefix, directory) + if mode: + os.chmod(tfname, mode) + if group: + gid = grp.getgrnam(group).gr_gid + os.chown(tfname, -1, gid) + return (tfd, tfname) ################################################################################ -def temp_dirname(parent=None, prefix="dak", suffix=""): +def temp_dirname(parent=None, prefix="dak", suffix="", mode=None, group=None): """ Return a secure and unique directory by pre-creating it. - If 'parent' is non-null, it will be the directory the directory is pre-created in. - If 'prefix' is non-null, the filename will be prefixed with it, default is dak. - If 'suffix' is non-null, the filename will end with it. - Returns a pathname to the new directory + @type parent: str + @param parent: If non-null it will be the directory the directory is pre-created in. + + @type prefix: str + @param prefix: The filename will be prefixed with this string + + @type suffix: str + @param suffix: The filename will end with this string + + @type mode: str + @param mode: If set the file will get chmodded to those permissions + + @type group: str + @param group: If set the file will get chgrped to the specified group. 
+ + @rtype: list + @return: Returns a pair (fd, name) + """ - return tempfile.mkdtemp(suffix, prefix, parent) + tfname = tempfile.mkdtemp(suffix, prefix, parent) + if mode: + os.chmod(tfname, mode) + if group: + gid = grp.getgrnam(group).gr_gid + os.chown(tfname, -1, gid) + return tfname ################################################################################ @@ -1545,27 +1544,20 @@ def get_changes_files(from_dir): # Much of the rest of p-u/p-a depends on being in the right place os.chdir(from_dir) changes_files = [x for x in os.listdir(from_dir) if x.endswith('.changes')] - except OSError, e: + except OSError as e: fubar("Failed to read list from directory %s (%s)" % (from_dir, e)) return changes_files ################################################################################ -apt_pkg.init() - -Cnf = apt_pkg.newConfiguration() -if not os.getenv("DAK_TEST"): - apt_pkg.ReadConfigFileISC(Cnf,default_config) - -if which_conf_file() != default_config: - apt_pkg.ReadConfigFileISC(Cnf,which_conf_file()) +Cnf = config.Config().Cnf ################################################################################ def parse_wnpp_bug_file(file = "/srv/ftp-master.debian.org/scripts/masterfiles/wnpp_rm"): """ - Parses the wnpp bug list available at http://qa.debian.org/data/bts/wnpp_rm + Parses the wnpp bug list available at https://qa.debian.org/data/bts/wnpp_rm Well, actually it parsed a local copy, but let's document the source somewhere ;) @@ -1577,9 +1569,9 @@ def parse_wnpp_bug_file(file = "/srv/ftp-master.debian.org/scripts/masterfiles/w try: f = open(file) lines = f.readlines() - except IOError, e: + except IOError as e: print "Warning: Couldn't open %s; don't know about WNPP bugs, so won't close any." % file - lines = [] + lines = [] wnpp = {} for line in lines: @@ -1617,7 +1609,6 @@ def get_packages_from_ftp(root, suite, component, architecture): @rtype: TagFile @return: apt_pkg class containing package data - """ filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (root, suite, component, architecture) (fd, temp_file) = temp_filename() @@ -1630,6 +1621,247 @@ def get_packages_from_ftp(root, suite, component, architecture): if (result != 0): fubar("Gunzip invocation failed!\n%s\n" % (output), result) packages = open_file(temp_file) - Packages = apt_pkg.ParseTagFile(packages) + Packages = apt_pkg.TagFile(packages) os.unlink(temp_file) return Packages + +################################################################################ + +def deb_extract_control(fh): + """extract DEBIAN/control from a binary package""" + return apt_inst.DebFile(fh).control.extractdata("control") + +################################################################################ + +def mail_addresses_for_upload(maintainer, changed_by, fingerprint): + """mail addresses to contact for an upload + + @type maintainer: str + @param maintainer: Maintainer field of the .changes file + + @type changed_by: str + @param changed_by: Changed-By field of the .changes file + + @type fingerprint: str + @param fingerprint: fingerprint of the key used to sign the upload + + @rtype: list of str + @return: list of RFC 2047-encoded mail addresses to contact regarding + this upload + """ + addresses = [maintainer] + if changed_by != maintainer: + addresses.append(changed_by) + + fpr_addresses = gpg_get_key_addresses(fingerprint) + if len(fpr_addresses) > 0 and fix_maintainer(changed_by)[3] not in fpr_addresses and fix_maintainer(maintainer)[3] not in fpr_addresses: + addresses.append(fpr_addresses[0]) + + 
encoded_addresses = [ fix_maintainer(e)[1] for e in addresses ] + return encoded_addresses + +################################################################################ + +def call_editor(text="", suffix=".txt"): + """run editor and return the result as a string + + @type text: str + @param text: initial text + + @type suffix: str + @param suffix: extension for temporary file + + @rtype: str + @return: string with the edited text + """ + editor = os.environ.get('VISUAL', os.environ.get('EDITOR', 'vi')) + tmp = tempfile.NamedTemporaryFile(suffix=suffix, delete=False) + try: + print >>tmp, text, + tmp.close() + daklib.daksubprocess.check_call([editor, tmp.name]) + return open(tmp.name, 'r').read() + finally: + os.unlink(tmp.name) + +################################################################################ + +def check_reverse_depends(removals, suite, arches=None, session=None, cruft=False): + dbsuite = get_suite(suite, session) + overridesuite = dbsuite + if dbsuite.overridesuite is not None: + overridesuite = get_suite(dbsuite.overridesuite, session) + dep_problem = 0 + p2c = {} + all_broken = {} + if arches: + all_arches = set(arches) + else: + all_arches = set([x.arch_string for x in get_suite_architectures(suite)]) + all_arches -= set(["source", "all"]) + metakey_d = get_or_set_metadatakey("Depends", session) + metakey_p = get_or_set_metadatakey("Provides", session) + params = { + 'suite_id': dbsuite.suite_id, + 'metakey_d_id': metakey_d.key_id, + 'metakey_p_id': metakey_p.key_id, + } + for architecture in all_arches | set(['all']): + deps = {} + sources = {} + virtual_packages = {} + params['arch_id'] = get_architecture(architecture, session).arch_id + + statement = ''' + SELECT b.id, b.package, s.source, c.name as component, + (SELECT bmd.value FROM binaries_metadata bmd WHERE bmd.bin_id = b.id AND bmd.key_id = :metakey_d_id) AS depends, + (SELECT bmp.value FROM binaries_metadata bmp WHERE bmp.bin_id = b.id AND bmp.key_id = :metakey_p_id) AS provides + FROM binaries b + JOIN bin_associations ba ON b.id = ba.bin AND ba.suite = :suite_id + JOIN source s ON b.source = s.id + JOIN files_archive_map af ON b.file = af.file_id + JOIN component c ON af.component_id = c.id + WHERE b.architecture = :arch_id''' + query = session.query('id', 'package', 'source', 'component', 'depends', 'provides'). \ + from_statement(statement).params(params) + for binary_id, package, source, component, depends, provides in query: + sources[package] = source + p2c[package] = component + if depends is not None: + deps[package] = depends + # Maintain a counter for each virtual package. If a + # Provides: exists, set the counter to 0 and count all + # provides by a package not in the list for removal. + # If the counter stays 0 at the end, we know that only + # the to-be-removed packages provided this virtual + # package. + if provides is not None: + for virtual_pkg in provides.split(","): + virtual_pkg = virtual_pkg.strip() + if virtual_pkg == package: continue + if not virtual_packages.has_key(virtual_pkg): + virtual_packages[virtual_pkg] = 0 + if package not in removals: + virtual_packages[virtual_pkg] += 1 + + # If a virtual package is only provided by the to-be-removed + # packages, treat the virtual package as to-be-removed too. 
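+        # (Worked example: if "exim4" is in removals and is the only
+        # provider of "mail-transport-agent", that virtual package's
+        # counter never leaves 0, so it joins the removal list below.)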
+ for virtual_pkg in virtual_packages.keys(): + if virtual_packages[virtual_pkg] == 0: + removals.append(virtual_pkg) + + # Check binary dependencies (Depends) + for package in deps.keys(): + if package in removals: continue + parsed_dep = [] + try: + parsed_dep += apt_pkg.parse_depends(deps[package]) + except ValueError as e: + print "Error for package %s: %s" % (package, e) + for dep in parsed_dep: + # Check for partial breakage. If a package has a ORed + # dependency, there is only a dependency problem if all + # packages in the ORed depends will be removed. + unsat = 0 + for dep_package, _, _ in dep: + if dep_package in removals: + unsat += 1 + if unsat == len(dep): + component = p2c[package] + source = sources[package] + if component != "main": + source = "%s/%s" % (source, component) + all_broken.setdefault(source, {}).setdefault(package, set()).add(architecture) + dep_problem = 1 + + if all_broken: + if cruft: + print " - broken Depends:" + else: + print "# Broken Depends:" + for source, bindict in sorted(all_broken.items()): + lines = [] + for binary, arches in sorted(bindict.items()): + if arches == all_arches or 'all' in arches: + lines.append(binary) + else: + lines.append('%s [%s]' % (binary, ' '.join(sorted(arches)))) + if cruft: + print ' %s: %s' % (source, lines[0]) + else: + print '%s: %s' % (source, lines[0]) + for line in lines[1:]: + if cruft: + print ' ' + ' ' * (len(source) + 2) + line + else: + print ' ' * (len(source) + 2) + line + if not cruft: + print + + # Check source dependencies (Build-Depends and Build-Depends-Indep) + all_broken.clear() + metakey_bd = get_or_set_metadatakey("Build-Depends", session) + metakey_bdi = get_or_set_metadatakey("Build-Depends-Indep", session) + params = { + 'suite_id': dbsuite.suite_id, + 'metakey_ids': (metakey_bd.key_id, metakey_bdi.key_id), + } + statement = ''' + SELECT s.id, s.source, string_agg(sm.value, ', ') as build_dep + FROM source s + JOIN source_metadata sm ON s.id = sm.src_id + WHERE s.id in + (SELECT source FROM src_associations + WHERE suite = :suite_id) + AND sm.key_id in :metakey_ids + GROUP BY s.id, s.source''' + query = session.query('id', 'source', 'build_dep').from_statement(statement). 
\ + params(params) + for source_id, source, build_dep in query: + if source in removals: continue + parsed_dep = [] + if build_dep is not None: + # Remove [arch] information since we want to see breakage on all arches + build_dep = re_build_dep_arch.sub("", build_dep) + try: + parsed_dep += apt_pkg.parse_depends(build_dep) + except ValueError as e: + print "Error for source %s: %s" % (source, e) + for dep in parsed_dep: + unsat = 0 + for dep_package, _, _ in dep: + if dep_package in removals: + unsat += 1 + if unsat == len(dep): + component, = session.query(Component.component_name) \ + .join(Component.overrides) \ + .filter(Override.suite == overridesuite) \ + .filter(Override.package == re.sub('/(contrib|non-free)$', '', source)) \ + .join(Override.overridetype).filter(OverrideType.overridetype == 'dsc') \ + .first() + key = source + if component != "main": + key = "%s/%s" % (source, component) + all_broken.setdefault(key, set()).add(pp_deps(dep)) + dep_problem = 1 + + if all_broken: + if cruft: + print " - broken Build-Depends:" + else: + print "# Broken Build-Depends:" + for source, bdeps in sorted(all_broken.items()): + bdeps = sorted(bdeps) + if cruft: + print ' %s: %s' % (source, bdeps[0]) + else: + print '%s: %s' % (source, bdeps[0]) + for bdep in bdeps[1:]: + if cruft: + print ' ' + ' ' * (len(source) + 2) + bdep + else: + print ' ' * (len(source) + 2) + bdep + if not cruft: + print + + return dep_problem
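
Note: check_reverse_depends() above calls pp_deps(dep) when recording
broken build dependencies, but no pp_deps is defined or imported in any
hunk of this diff. dak's removal code has historically used a small
pretty-printer for one ORed group from apt_pkg.parse_depends() along
these lines (a hedged sketch, not necessarily the definition this file
ends up using):

    def pp_deps(deps):
        # deps is one ORed group from apt_pkg.parse_depends(), i.e. a
        # list of (package, version, constraint) tuples.
        pp = []
        for pkg, version, constraint in deps:
            if constraint:
                pp.append("%s (%s %s)" % (pkg, constraint, version))
            else:
                pp.append(pkg)
        return " | ".join(pp)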