X-Git-Url: https://git.decadent.org.uk/gitweb/?a=blobdiff_plain;f=daklib%2Futils.py;h=f702db3fd74d44a93caa314310458fe1a4b16387;hb=20fb8bcccb9c79bd5317856c3d8e6237021613de;hp=b9fc2cfce5b198f3382974cd688d9c4d8226d7f2;hpb=1136f901e22480f5f7c1c18e91c54ebfe07513cc;p=dak.git diff --git a/daklib/utils.py b/daklib/utils.py old mode 100755 new mode 100644 index b9fc2cfc..f702db3f --- a/daklib/utils.py +++ b/daklib/utils.py @@ -23,6 +23,7 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA import commands +import datetime import email.Header import os import pwd @@ -33,20 +34,27 @@ import sys import tempfile import traceback import stat +import apt_inst import apt_pkg import time import re import email as modemail import subprocess - -from dbconn import DBConn, get_architecture, get_component, get_suite, get_override_type, Keyring, session_wrapper +import ldap + +from dbconn import DBConn, get_architecture, get_component, get_suite, \ + get_override_type, Keyring, session_wrapper, \ + get_active_keyring_paths, get_primary_keyring_path, \ + get_suite_architectures, get_or_set_metadatakey, DBSource, \ + Component, Override, OverrideType +from sqlalchemy import desc from dak_exceptions import * from gpg import SignedFile from textutils import fix_maintainer from regexes import re_html_escaping, html_escaping, re_single_line_field, \ re_multi_line_field, re_srchasver, re_taint_free, \ re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource, \ - re_is_orig_source + re_is_orig_source, re_build_dep_arch from formats import parse_format, validate_changes_format from srcformats import get_format_from_string @@ -112,7 +120,7 @@ def open_file(filename, mode='r'): try: f = open(filename, mode) except IOError: - raise CantOpenError, filename + raise CantOpenError(filename) return f ################################################################################ @@ -135,7 +143,7 @@ def our_raw_input(prompt=""): ################################################################################ -def extract_component_from_section(section): +def extract_component_from_section(section, session=None): component = "" if section.find('/') != -1: @@ -143,20 +151,20 @@ def extract_component_from_section(section): # Expand default component if component == "": - if Cnf.has_key("Component::%s" % section): - component = section - else: + comp = get_component(section, session) + if comp is None: component = "main" + else: + component = comp.component_name return (section, component) ################################################################################ -def parse_deb822(armored_contents, signing_rules=0, keyrings=None): - if keyrings == None: - keyrings = [ k.keyring_name for k in DBConn().session().query(Keyring).filter(Keyring.active == True).all() ] +def parse_deb822(armored_contents, signing_rules=0, keyrings=None, session=None): require_signature = True - if signing_rules == -1: + if keyrings == None: + keyrings = [] require_signature = False signed_file = SignedFile(armored_contents, keyrings=keyrings, require_signature=require_signature) @@ -169,7 +177,7 @@ def parse_deb822(armored_contents, signing_rules=0, keyrings=None): lines = contents.splitlines(True) if len(lines) == 0: - raise ParseChangesError, "[Empty changes file]" + raise ParseChangesError("[Empty changes file]") # Reindex by line number so we can easily verify the format of # .dsc files... 
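A minimal sketch (not part of the diff above) of how the reworked extract_component_from_section() is expected to behave; the section names are illustrative, and the bare-section case assumes the database defines no component matching it:

    from daklib import utils

    # "non-free/libs" names its component explicitly; a bare section such as
    # "utils" now falls back to "main" unless get_component() finds a matching
    # component in the database (assumed not to exist here).
    section, component = utils.extract_component_from_section("non-free/libs")
    # section == "non-free/libs", component == "non-free"
    section, component = utils.extract_component_from_section("utils")
    # section == "utils", component == "main"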
@@ -187,7 +195,7 @@ def parse_deb822(armored_contents, signing_rules=0, keyrings=None): line = indexed_lines[index] if line == "" and signing_rules == 1: if index != num_of_lines: - raise InvalidDscError, index + raise InvalidDscError(index) break slf = re_single_line_field.match(line) if slf: @@ -201,7 +209,7 @@ def parse_deb822(armored_contents, signing_rules=0, keyrings=None): mlf = re_multi_line_field.match(line) if mlf: if first == -1: - raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line) + raise ParseChangesError("'%s'\n [Multi-line field continuing on from nothing?]" % (line)) if first == 1 and changes[field] != "": changes[field] += '\n' first = 0 @@ -209,7 +217,7 @@ def parse_deb822(armored_contents, signing_rules=0, keyrings=None): continue error += line - changes["filecontents"] = "".join(lines) + changes["filecontents"] = armored_contents if changes.has_key("source"): # Strip the source version in brackets from the source field, @@ -220,7 +228,7 @@ def parse_deb822(armored_contents, signing_rules=0, keyrings=None): changes["source-version"] = srcver.group(2) if error: - raise ParseChangesError, error + raise ParseChangesError(error) return changes @@ -254,7 +262,7 @@ def parse_changes(filename, signing_rules=0, dsc_file=0, keyrings=None): try: unicode(content, 'utf-8') except UnicodeError: - raise ChangesUnicodeError, "Changes file not proper utf-8" + raise ChangesUnicodeError("Changes file not proper utf-8") changes = parse_deb822(content, signing_rules, keyrings=keyrings) @@ -269,7 +277,7 @@ def parse_changes(filename, signing_rules=0, dsc_file=0, keyrings=None): missingfields.append(keyword) if len(missingfields): - raise ParseChangesError, "Missing mandantory field(s) in changes file (policy 5.5): %s" % (missingfields) + raise ParseChangesError("Missing mandantory field(s) in changes file (policy 5.5): %s" % (missingfields)) return changes @@ -347,7 +355,7 @@ def check_size(where, files): for f in files.keys(): try: entry = os.stat(f) - except OSError, exc: + except OSError as exc: if exc.errno == 2: # TODO: This happens when the file is in the pool. continue @@ -362,7 +370,7 @@ def check_size(where, files): ################################################################################ -def check_dsc_files(dsc_filename, dsc=None, dsc_files=None): +def check_dsc_files(dsc_filename, dsc, dsc_files): """ Verify that the files listed in the Files field of the .dsc are those expected given the announced Format. 
@@ -381,13 +389,6 @@ def check_dsc_files(dsc_filename, dsc=None, dsc_files=None): """ rejmsg = [] - # Parse the file if needed - if dsc is None: - dsc = parse_changes(dsc_filename, signing_rules=1, dsc_file=1); - - if dsc_files is None: - dsc_files = build_file_list(dsc, is_a_dsc=1) - # Ensure .dsc lists proper set of source files according to the format # announced has = defaultdict(lambda: 0) @@ -396,13 +397,13 @@ def check_dsc_files(dsc_filename, dsc=None, dsc_files=None): (r'orig.tar.gz', ('orig_tar_gz', 'orig_tar')), (r'diff.gz', ('debian_diff',)), (r'tar.gz', ('native_tar_gz', 'native_tar')), - (r'debian\.tar\.(gz|bz2)', ('debian_tar',)), - (r'orig\.tar\.(gz|bz2)', ('orig_tar',)), - (r'tar\.(gz|bz2)', ('native_tar',)), - (r'orig-.+\.tar\.(gz|bz2)', ('more_orig_tar',)), + (r'debian\.tar\.(gz|bz2|xz)', ('debian_tar',)), + (r'orig\.tar\.(gz|bz2|xz)', ('orig_tar',)), + (r'tar\.(gz|bz2|xz)', ('native_tar',)), + (r'orig-.+\.tar\.(gz|bz2|xz)', ('more_orig_tar',)), ) - for f in dsc_files.keys(): + for f in dsc_files: m = re_issource.match(f) if not m: rejmsg.append("%s: %s in Files field not recognised as source." @@ -523,8 +524,7 @@ def parse_checksums(where, files, manifest, hashname): files[checkfile][hash_key(hashname)] = checksum for f in files.keys(): if not files[f].has_key(hash_key(hashname)): - rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile, - hashname, where)) + rejmsg.append("%s: no entry in checksums-%s in %s" % (f, hashname, where)) return rejmsg ################################################################################ @@ -556,7 +556,7 @@ def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"): else: (md5, size, name) = s except ValueError: - raise ParseChangesError, i + raise ParseChangesError(i) if section == "": section = "-" @@ -574,39 +574,28 @@ def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"): ################################################################################ # see http://bugs.debian.org/619131 -def build_package_set(dsc, session = None): - if not dsc.has_key("package-set"): +def build_package_list(dsc, session = None): + if not dsc.has_key("package-list"): return {} packages = {} - for line in dsc["package-set"].split("\n"): + for line in dsc["package-list"].split("\n"): if not line: break - (name, section, priority) = line.split() - (section, component) = extract_component_from_section(section) - - package_type = "deb" - if name.find(":") != -1: - (package_type, name) = name.split(":", 1) - if package_type == "src": - package_type = "dsc" + fields = line.split() + name = fields[0] + package_type = fields[1] + (section, component) = extract_component_from_section(fields[2]) + priority = fields[3] # Validate type if we have a session if session and get_override_type(package_type, session) is None: # Maybe just warn and ignore? exit(1) might be a bit hard... - utils.fubar("invalid type (%s) in Package-Set." % (package_type)) + utils.fubar("invalid type (%s) in Package-List." 
% (package_type)) - if section == "": - section = "-" - if priority == "": - priority = "-" - - if package_type == "dsc": - priority = "source" - - if not packages.has_key(name) or packages[name]["type"] == "dsc": + if name not in packages or packages[name]["type"] == "dsc": packages[name] = dict(priority=priority, section=section, type=package_type, component=component, files=[]) return packages @@ -616,6 +605,14 @@ def build_package_set(dsc, session = None): def send_mail (message, filename=""): """sendmail wrapper, takes _either_ a message string or a file as arguments""" + maildir = Cnf.get('Dir::Mail') + if maildir: + path = os.path.join(maildir, datetime.datetime.now().isoformat()) + path = find_next_free(path) + fh = open(path, 'w') + print >>fh, message, + fh.close() + # Check whether we're supposed to be sending mail if Cnf.has_key("Dinstall::Options::No-Mail") and Cnf["Dinstall::Options::No-Mail"]: return @@ -684,14 +681,14 @@ def send_mail (message, filename=""): os.unlink (filename); return; - fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700); + fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0o700); os.write (fd, message_raw.as_string(True)); os.close (fd); # Invoke sendmail (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename)) if (result != 0): - raise SendmailFailedError, output + raise SendmailFailedError(output) # Clean up any temporary files if message: @@ -699,24 +696,22 @@ def send_mail (message, filename=""): ################################################################################ -def poolify (source, component): - if component: - component += '/' +def poolify (source, component=None): if source[:3] == "lib": - return component + source[:4] + '/' + source + '/' + return source[:4] + '/' + source + '/' else: - return component + source[:1] + '/' + source + '/' + return source[:1] + '/' + source + '/' ################################################################################ -def move (src, dest, overwrite = 0, perms = 0664): +def move (src, dest, overwrite = 0, perms = 0o664): if os.path.exists(dest) and os.path.isdir(dest): dest_dir = dest else: dest_dir = os.path.dirname(dest) if not os.path.exists(dest_dir): umask = os.umask(00000) - os.makedirs(dest_dir, 02775) + os.makedirs(dest_dir, 0o2775) os.umask(umask) #print "Moving %s to %s..." % (src, dest) if os.path.exists(dest) and os.path.isdir(dest): @@ -732,14 +727,14 @@ def move (src, dest, overwrite = 0, perms = 0664): os.chmod(dest, perms) os.unlink(src) -def copy (src, dest, overwrite = 0, perms = 0664): +def copy (src, dest, overwrite = 0, perms = 0o664): if os.path.exists(dest) and os.path.isdir(dest): dest_dir = dest else: dest_dir = os.path.dirname(dest) if not os.path.exists(dest_dir): umask = os.umask(00000) - os.makedirs(dest_dir, 02775) + os.makedirs(dest_dir, 0o2775) os.umask(umask) #print "Copying %s to %s..." % (src, dest) if os.path.exists(dest) and os.path.isdir(dest): @@ -770,11 +765,11 @@ def which_conf_file (): res = socket.getfqdn() # In case we allow local config files per user, try if one exists - if Cnf.FindB("Config::" + res + "::AllowLocalConfig"): + if Cnf.find_b("Config::" + res + "::AllowLocalConfig"): homedir = os.getenv("HOME") confpath = os.path.join(homedir, "/etc/dak.conf") if os.path.exists(confpath): - apt_pkg.ReadConfigFileISC(Cnf,default_config) + apt_pkg.ReadConfigFileISC(Cnf,confpath) # We are still in here, so there is no local config file or we do # not allow local files. Do the normal stuff. 
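For illustration only (not taken from the patch): the stripped-down poolify() now returns just the pool subdirectory for a source package and leaves any component prefix to the caller; the package names below are arbitrary examples.

    from daklib import utils
    import os

    # Sources starting with "lib" keep the four-character prefix directory,
    # everything else uses the first letter of the source name.
    utils.poolify("dak")          # -> 'd/dak/'
    utils.poolify("libapt-pkg")   # -> 'liba/libapt-pkg/'

    # Callers that previously passed a component now join it themselves, e.g.
    # os.path.join('pool', 'main', utils.poolify('dak')) -> 'pool/main/d/dak/'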
@@ -786,7 +781,7 @@ def which_conf_file (): def which_apt_conf_file (): res = socket.getfqdn() # In case we allow local config files per user, try if one exists - if Cnf.FindB("Config::" + res + "::AllowLocalConfig"): + if Cnf.find_b("Config::" + res + "::AllowLocalConfig"): homedir = os.getenv("HOME") confpath = os.path.join(homedir, "/etc/dak.conf") if os.path.exists(confpath): @@ -882,7 +877,7 @@ def changes_compare (a, b): # Sort by source version a_version = a_changes.get("version", "0") b_version = b_changes.get("version", "0") - q = apt_pkg.VersionCompare(a_version, b_version) + q = apt_pkg.version_compare(a_version, b_version) if q: return q @@ -1019,8 +1014,8 @@ def parse_args(Options): suite_ids_list = [] for suitename in split_args(Options["Suite"]): suite = get_suite(suitename, session=session) - if suite.suite_id is None: - warn("suite '%s' not recognised." % (suite.suite_name)) + if not suite or suite.suite_id is None: + warn("suite '%s' not recognised." % (suite and suite.suite_name or suitename)) else: suite_ids_list.append(suite.suite_id) if suite_ids_list: @@ -1072,43 +1067,6 @@ def parse_args(Options): ################################################################################ -# Inspired(tm) by Bryn Keller's print_exc_plus (See -# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215) - -def print_exc(): - tb = sys.exc_info()[2] - while tb.tb_next: - tb = tb.tb_next - stack = [] - frame = tb.tb_frame - while frame: - stack.append(frame) - frame = frame.f_back - stack.reverse() - traceback.print_exc() - for frame in stack: - print "\nFrame %s in %s at line %s" % (frame.f_code.co_name, - frame.f_code.co_filename, - frame.f_lineno) - for key, value in frame.f_locals.items(): - print "\t%20s = " % key, - try: - print value - except: - print "" - -################################################################################ - -def try_with_debug(function): - try: - function() - except SystemExit: - raise - except: - print_exc() - -################################################################################ - def arch_compare_sw (a, b): """ Function for use in sorting lists of architectures. 
@@ -1250,7 +1208,7 @@ def retrieve_key (filename, keyserver=None, keyring=None): if not keyserver: keyserver = Cnf["Dinstall::KeyServer"] if not keyring: - keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0] + keyring = get_primary_keyring_path() # Ensure the filename contains no shell meta-characters or other badness if not re_taint_free.match(filename): @@ -1287,7 +1245,7 @@ def retrieve_key (filename, keyserver=None, keyring=None): def gpg_keyring_args(keyrings=None): if not keyrings: - keyrings = Cnf.ValueList("Dinstall::GPGKeyring") + keyrings = get_active_keyring_paths() return " ".join(["--keyring %s" % x for x in keyrings]) @@ -1432,50 +1390,37 @@ def gpg_get_key_addresses(fingerprint): addresses = key_uid_email_cache.get(fingerprint) if addresses != None: return addresses - addresses = set() + addresses = list() cmd = "gpg --no-default-keyring %s --fingerprint %s" \ % (gpg_keyring_args(), fingerprint) (result, output) = commands.getstatusoutput(cmd) if result == 0: for l in output.split('\n'): m = re_gpg_uid.match(l) - if m: - addresses.add(m.group(1)) + if not m: + continue + address = m.group(1) + if address.endswith('@debian.org'): + # prefer @debian.org addresses + # TODO: maybe not hardcode the domain + addresses.insert(0, address) + else: + addresses.append(m.group(1)) key_uid_email_cache[fingerprint] = addresses return addresses ################################################################################ -# Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603 +def get_login_from_ldap(fingerprint): + """retrieve login from LDAP linked to a given fingerprint""" -def wrap(paragraph, max_length, prefix=""): - line = "" - s = "" - have_started = 0 - words = paragraph.split() - - for word in words: - word_size = len(word) - if word_size > max_length: - if have_started: - s += line + '\n' + prefix - s += word + '\n' + prefix - else: - if have_started: - new_length = len(line) + word_size + 1 - if new_length > max_length: - s += line + '\n' + prefix - line = word - else: - line += ' ' + word - else: - line = word - have_started = 1 - - if have_started: - s += line - - return s + LDAPDn = Cnf['Import-LDAP-Fingerprints::LDAPDn'] + LDAPServer = Cnf['Import-LDAP-Fingerprints::LDAPServer'] + l = ldap.open(LDAPServer) + l.simple_bind_s('','') + Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL, + '(keyfingerprint=%s)' % fingerprint, ['uid']) + return Attrs[0][1]['uid'][0] ################################################################################ @@ -1546,7 +1491,7 @@ def get_changes_files(from_dir): # Much of the rest of p-u/p-a depends on being in the right place os.chdir(from_dir) changes_files = [x for x in os.listdir(from_dir) if x.endswith('.changes')] - except OSError, e: + except OSError as e: fubar("Failed to read list from directory %s (%s)" % (from_dir, e)) return changes_files @@ -1555,12 +1500,12 @@ def get_changes_files(from_dir): apt_pkg.init() -Cnf = apt_pkg.newConfiguration() +Cnf = apt_pkg.Configuration() if not os.getenv("DAK_TEST"): - apt_pkg.ReadConfigFileISC(Cnf,default_config) + apt_pkg.read_config_file_isc(Cnf,default_config) if which_conf_file() != default_config: - apt_pkg.ReadConfigFileISC(Cnf,which_conf_file()) + apt_pkg.read_config_file_isc(Cnf,which_conf_file()) ################################################################################ @@ -1578,7 +1523,7 @@ def parse_wnpp_bug_file(file = "/srv/ftp-master.debian.org/scripts/masterfiles/w try: f = open(file) lines = f.readlines() - except IOError, e: + except IOError as e: print "Warning: 
Couldn't open %s; don't know about WNPP bugs, so won't close any." % file lines = [] wnpp = {} @@ -1618,7 +1563,6 @@ def get_packages_from_ftp(root, suite, component, architecture): @rtype: TagFile @return: apt_pkg class containing package data - """ filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (root, suite, component, architecture) (fd, temp_file) = temp_filename() @@ -1634,3 +1578,243 @@ def get_packages_from_ftp(root, suite, component, architecture): Packages = apt_pkg.ParseTagFile(packages) os.unlink(temp_file) return Packages + +################################################################################ + +def deb_extract_control(fh): + """extract DEBIAN/control from a binary package""" + return apt_inst.DebFile(fh).control.extractdata("control") + +################################################################################ + +def mail_addresses_for_upload(maintainer, changed_by, fingerprint): + """mail addresses to contact for an upload + + @type maintainer: str + @param maintainer: Maintainer field of the .changes file + + @type changed_by: str + @param changed_by: Changed-By field of the .changes file + + @type fingerprint: str + @param fingerprint: fingerprint of the key used to sign the upload + + @rtype: list of str + @return: list of RFC 2047-encoded mail addresses to contact regarding + this upload + """ + addresses = [maintainer] + if changed_by != maintainer: + addresses.append(changed_by) + + fpr_addresses = gpg_get_key_addresses(fingerprint) + if len(fpr_addresses) > 0 and fix_maintainer(changed_by)[3] not in fpr_addresses and fix_maintainer(maintainer)[3] not in fpr_addresses: + addresses.append(fpr_addresses[0]) + + encoded_addresses = [ fix_maintainer(e)[1] for e in addresses ] + return encoded_addresses + +################################################################################ + +def call_editor(text="", suffix=".txt"): + """run editor and return the result as a string + + @type text: str + @param text: initial text + + @type suffix: str + @param suffix: extension for temporary file + + @rtype: str + @return: string with the edited text + """ + editor = os.environ.get('VISUAL', os.environ.get('EDITOR', 'vi')) + tmp = tempfile.NamedTemporaryFile(suffix=suffix, delete=False) + try: + print >>tmp, text, + tmp.close() + subprocess.check_call([editor, tmp.name]) + return open(tmp.name, 'r').read() + finally: + os.unlink(tmp.name) + +################################################################################ + +def check_reverse_depends(removals, suite, arches=None, session=None, cruft=False): + dbsuite = get_suite(suite, session) + overridesuite = dbsuite + if dbsuite.overridesuite is not None: + overridesuite = get_suite(dbsuite.overridesuite, session) + dep_problem = 0 + p2c = {} + all_broken = {} + if arches: + all_arches = set(arches) + else: + all_arches = set([x.arch_string for x in get_suite_architectures(suite)]) + all_arches -= set(["source", "all"]) + metakey_d = get_or_set_metadatakey("Depends", session) + metakey_p = get_or_set_metadatakey("Provides", session) + params = { + 'suite_id': dbsuite.suite_id, + 'metakey_d_id': metakey_d.key_id, + 'metakey_p_id': metakey_p.key_id, + } + for architecture in all_arches | set(['all']): + deps = {} + sources = {} + virtual_packages = {} + params['arch_id'] = get_architecture(architecture, session).arch_id + + statement = ''' + SELECT b.id, b.package, s.source, c.name as component, + (SELECT bmd.value FROM binaries_metadata bmd WHERE bmd.bin_id = b.id AND bmd.key_id = :metakey_d_id) AS 
depends, + (SELECT bmp.value FROM binaries_metadata bmp WHERE bmp.bin_id = b.id AND bmp.key_id = :metakey_p_id) AS provides + FROM binaries b + JOIN bin_associations ba ON b.id = ba.bin AND ba.suite = :suite_id + JOIN source s ON b.source = s.id + JOIN files_archive_map af ON b.file = af.file_id + JOIN component c ON af.component_id = c.id + WHERE b.architecture = :arch_id''' + query = session.query('id', 'package', 'source', 'component', 'depends', 'provides'). \ + from_statement(statement).params(params) + for binary_id, package, source, component, depends, provides in query: + sources[package] = source + p2c[package] = component + if depends is not None: + deps[package] = depends + # Maintain a counter for each virtual package. If a + # Provides: exists, set the counter to 0 and count all + # provides by a package not in the list for removal. + # If the counter stays 0 at the end, we know that only + # the to-be-removed packages provided this virtual + # package. + if provides is not None: + for virtual_pkg in provides.split(","): + virtual_pkg = virtual_pkg.strip() + if virtual_pkg == package: continue + if not virtual_packages.has_key(virtual_pkg): + virtual_packages[virtual_pkg] = 0 + if package not in removals: + virtual_packages[virtual_pkg] += 1 + + # If a virtual package is only provided by the to-be-removed + # packages, treat the virtual package as to-be-removed too. + for virtual_pkg in virtual_packages.keys(): + if virtual_packages[virtual_pkg] == 0: + removals.append(virtual_pkg) + + # Check binary dependencies (Depends) + for package in deps.keys(): + if package in removals: continue + parsed_dep = [] + try: + parsed_dep += apt_pkg.ParseDepends(deps[package]) + except ValueError as e: + print "Error for package %s: %s" % (package, e) + for dep in parsed_dep: + # Check for partial breakage. If a package has a ORed + # dependency, there is only a dependency problem if all + # packages in the ORed depends will be removed. 
+ unsat = 0 + for dep_package, _, _ in dep: + if dep_package in removals: + unsat += 1 + if unsat == len(dep): + component = p2c[package] + source = sources[package] + if component != "main": + source = "%s/%s" % (source, component) + all_broken.setdefault(source, {}).setdefault(package, set()).add(architecture) + dep_problem = 1 + + if all_broken: + if cruft: + print " - broken Depends:" + else: + print "# Broken Depends:" + for source, bindict in sorted(all_broken.items()): + lines = [] + for binary, arches in sorted(bindict.items()): + if arches == all_arches or 'all' in arches: + lines.append(binary) + else: + lines.append('%s [%s]' % (binary, ' '.join(sorted(arches)))) + if cruft: + print ' %s: %s' % (source, lines[0]) + else: + print '%s: %s' % (source, lines[0]) + for line in lines[1:]: + if cruft: + print ' ' + ' ' * (len(source) + 2) + line + else: + print ' ' * (len(source) + 2) + line + if not cruft: + print + + # Check source dependencies (Build-Depends and Build-Depends-Indep) + all_broken.clear() + metakey_bd = get_or_set_metadatakey("Build-Depends", session) + metakey_bdi = get_or_set_metadatakey("Build-Depends-Indep", session) + params = { + 'suite_id': dbsuite.suite_id, + 'metakey_ids': (metakey_bd.key_id, metakey_bdi.key_id), + } + statement = ''' + SELECT s.id, s.source, string_agg(sm.value, ', ') as build_dep + FROM source s + JOIN source_metadata sm ON s.id = sm.src_id + WHERE s.id in + (SELECT source FROM src_associations + WHERE suite = :suite_id) + AND sm.key_id in :metakey_ids + GROUP BY s.id, s.source''' + query = session.query('id', 'source', 'build_dep').from_statement(statement). \ + params(params) + for source_id, source, build_dep in query: + if source in removals: continue + parsed_dep = [] + if build_dep is not None: + # Remove [arch] information since we want to see breakage on all arches + build_dep = re_build_dep_arch.sub("", build_dep) + try: + parsed_dep += apt_pkg.ParseDepends(build_dep) + except ValueError as e: + print "Error for source %s: %s" % (source, e) + for dep in parsed_dep: + unsat = 0 + for dep_package, _, _ in dep: + if dep_package in removals: + unsat += 1 + if unsat == len(dep): + component, = session.query(Component.component_name) \ + .join(Component.overrides) \ + .filter(Override.suite == overridesuite) \ + .filter(Override.package == source) \ + .join(Override.overridetype).filter(OverrideType.overridetype == 'dsc') \ + .first() + if component != "main": + source = "%s/%s" % (source, component) + all_broken.setdefault(source, set()).add(pp_deps(dep)) + dep_problem = 1 + + if all_broken: + if cruft: + print " - broken Build-Depends:" + else: + print "# Broken Build-Depends:" + for source, bdeps in sorted(all_broken.items()): + bdeps = sorted(bdeps) + if cruft: + print ' %s: %s' % (source, bdeps[0]) + else: + print '%s: %s' % (source, bdeps[0]) + for bdep in bdeps[1:]: + if cruft: + print ' ' + ' ' * (len(source) + 2) + bdep + else: + print ' ' * (len(source) + 2) + bdep + if not cruft: + print + + return dep_problem
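The helpers introduced at the end of the diff are easiest to see in use. A rough sketch follows (not part of dak itself); the file name, addresses and fingerprint are made up, and mail_addresses_for_upload() assumes the fingerprint can be resolved against a configured keyring:

    from daklib import utils

    # Pull DEBIAN/control out of a binary package without unpacking it fully.
    control = utils.deb_extract_control(open('example_1.0-1_amd64.deb'))

    # Work out whom to mail about an upload: maintainer, uploader and, if
    # neither matches a UID on the signing key, the key's preferred address.
    addresses = utils.mail_addresses_for_upload(
        'A. Maintainer <maintainer@example.org>',
        'An Uploader <uploader@example.org>',
        '0123456789ABCDEF0123456789ABCDEF01234567')

    # Let the operator edit a piece of text in $VISUAL/$EDITOR and get it back.
    note = utils.call_editor('Reason for removal:\n')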