2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
42 import email as modemail
46 import daklib.config as config
47 from dbconn import DBConn, get_architecture, get_component, get_suite, \
48 get_override_type, Keyring, session_wrapper, \
49 get_active_keyring_paths, get_primary_keyring_path, \
50 get_suite_architectures, get_or_set_metadatakey, DBSource, \
51 Component, Override, OverrideType
52 from sqlalchemy import desc
53 from dak_exceptions import *
54 from gpg import SignedFile
55 from textutils import fix_maintainer
56 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
57 re_multi_line_field, re_srchasver, re_taint_free, \
58 re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource, \
59 re_is_orig_source, re_build_dep_arch
61 from formats import parse_format, validate_changes_format
62 from srcformats import get_format_from_string
63 from collections import defaultdict
65 ################################################################################
67 default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
68 default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used
70 alias_cache = None #: Cache for email alias checks
71 key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
73 # (hashname, function, earliest_changes_version)
74 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
75 ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
77 # Monkeypatch commands.getstatusoutput as it may not return the correct exit
78 # code in lenny's Python. This also affects commands.getoutput and
80 def dak_getstatusoutput(cmd):
81 pipe = subprocess.Popen(cmd, shell=True, universal_newlines=True,
82 stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
84 output = pipe.stdout.read()
88 if output[-1:] == '\n':
96 commands.getstatusoutput = dak_getstatusoutput
98 ################################################################################
101 """ Escape html chars """
102 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
104 ################################################################################
106 def open_file(filename, mode='r'):
108 Open C{file}, return fileobject.
110 @type filename: string
111 @param filename: path/filename to open
114 @param mode: open mode
117 @return: open fileobject
119 @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
123 f = open(filename, mode)
125 raise CantOpenError(filename)
128 ################################################################################
130 def our_raw_input(prompt=""):
134 sys.stdout.write(prompt)
143 sys.stderr.write("\nUser interrupt (^D).\n")
146 ################################################################################
148 def extract_component_from_section(section, session=None):
# Derive the archive component from a Section value such as
# "contrib/net" -> component "contrib"; returns (section, component).
# NOTE(review): lines 149-150, 153, 155, 157-159 and 161 are elided from
# this listing, so the exact branch structure between the visible lines
# is not fully shown here.
151 if section.find('/') != -1:
# "component/section" form: the component is the part before the '/'.
152 component = section.split('/')[0]
154 # Expand default component
# Look the bare name up via the database helper imported from dbconn —
# presumably to handle a Section that is itself a component name.
156 comp = get_component(section, session)
160 component = comp.component_name
162 return (section, component)
164 ################################################################################
166 def parse_deb822(armored_contents, signing_rules=0, keyrings=None, session=None):
167 require_signature = True
170 require_signature = False
172 signed_file = SignedFile(armored_contents, keyrings=keyrings, require_signature=require_signature)
173 contents = signed_file.contents
178 # Split the lines in the input, keeping the linebreaks.
179 lines = contents.splitlines(True)
182 raise ParseChangesError("[Empty changes file]")
184 # Reindex by line number so we can easily verify the format of
190 indexed_lines[index] = line[:-1]
192 num_of_lines = len(indexed_lines.keys())
195 while index < num_of_lines:
197 line = indexed_lines[index]
198 if line == "" and signing_rules == 1:
199 if index != num_of_lines:
200 raise InvalidDscError(index)
202 slf = re_single_line_field.match(line)
204 field = slf.groups()[0].lower()
205 changes[field] = slf.groups()[1]
209 changes[field] += '\n'
211 mlf = re_multi_line_field.match(line)
214 raise ParseChangesError("'%s'\n [Multi-line field continuing on from nothing?]" % (line))
215 if first == 1 and changes[field] != "":
216 changes[field] += '\n'
218 changes[field] += mlf.groups()[0] + '\n'
222 changes["filecontents"] = armored_contents
224 if changes.has_key("source"):
225 # Strip the source version in brackets from the source field,
226 # put it in the "source-version" field instead.
227 srcver = re_srchasver.search(changes["source"])
229 changes["source"] = srcver.group(1)
230 changes["source-version"] = srcver.group(2)
233 raise ParseChangesError(error)
237 ################################################################################
239 def parse_changes(filename, signing_rules=0, dsc_file=0, keyrings=None):
241 Parses a changes file and returns a dictionary where each field is a
242 key. The mandatory first argument is the filename of the .changes
245 signing_rules is an optional argument:
247 - If signing_rules == -1, no signature is required.
248 - If signing_rules == 0 (the default), a signature is required.
249 - If signing_rules == 1, it turns on the same strict format checking
252 The rules for (signing_rules == 1)-mode are:
254 - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
255 followed by any PGP header data and must end with a blank line.
257 - The data section must end with a blank line and must be followed by
258 "-----BEGIN PGP SIGNATURE-----".
261 changes_in = open_file(filename)
262 content = changes_in.read()
265 unicode(content, 'utf-8')
267 raise ChangesUnicodeError("Changes file not proper utf-8")
268 changes = parse_deb822(content, signing_rules, keyrings=keyrings)
272 # Finally ensure that everything needed for .changes is there
273 must_keywords = ('Format', 'Date', 'Source', 'Binary', 'Architecture', 'Version',
274 'Distribution', 'Maintainer', 'Description', 'Changes', 'Files')
277 for keyword in must_keywords:
278 if not changes.has_key(keyword.lower()):
279 missingfields.append(keyword)
281 if len(missingfields):
282 raise ParseChangesError("Missing mandantory field(s) in changes file (policy 5.5): %s" % (missingfields))
286 ################################################################################
def hash_key(hashname):
    """Return the files-dict key used to store the given hash, e.g. 'sha1sum'."""
    return '%ssum' % (hashname,)
291 ################################################################################
293 def create_hash(where, files, hashname, hashfunc):
295 create_hash extends the passed files dict with the given hash by
296 iterating over all files on disk and passing them to the hashing
# Mutates `files` in place: each openable entry gains a
# files[f]['<hashname>sum'] key (see hash_key()).  hashfunc is e.g.
# apt_pkg.sha1sum (cf. known_hashes above) and takes the open file
# object.  Files that cannot be opened are reported via rejmsg instead
# of raising.  NOTE(review): rejmsg's initialisation and the final
# return fall on elided lines — presumably `return rejmsg`.
301 for f in files.keys():
303 file_handle = open_file(f)
304 except CantOpenError:
305 rejmsg.append("Could not open file %s for checksumming" % (f))
308 files[f][hash_key(hashname)] = hashfunc(file_handle)
315 def check_hash(where, files, hashname, hashfunc):
317 check_hash checks the given hash in the files dict against the actual
318 files on disk. The hash values need to be present consistently in
319 all file entries. It does not modify its input in any way.
323 for f in files.keys():
327 file_handle = open_file(f)
329 # Check for the hash entry, to not trigger a KeyError.
330 if not files[f].has_key(hash_key(hashname)):
331 rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
335 # Actually check the hash for correctness.
336 if hashfunc(file_handle) != files[f][hash_key(hashname)]:
337 rejmsg.append("%s: %s check failed in %s" % (f, hashname,
339 except CantOpenError:
340 # TODO: This happens when the file is in the pool.
341 # warn("Cannot open file %s" % f)
348 ################################################################################
350 def check_size(where, files):
352 check_size checks the file sizes in the passed files dict against the
# Compares the declared "size" field of every entry with the size found
# on disk (stat result indexed via stat.ST_SIZE); mismatches are
# appended to rejmsg.  `where` only labels the message (".changes",
# ".dsc", ...).  NOTE(review): the os.stat() call, the OSError handler
# body, rejmsg's initialisation and the final return are on elided
# lines.
357 for f in files.keys():
360 except OSError as exc:
362 # TODO: This happens when the file is in the pool.
366 actual_size = entry[stat.ST_SIZE]
# Declared size arrives as a string from the parsed control file.
367 size = int(files[f]["size"])
368 if size != actual_size:
369 rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
370 % (f, actual_size, size, where))
373 ################################################################################
375 def check_dsc_files(dsc_filename, dsc, dsc_files):
377 Verify that the files listed in the Files field of the .dsc are
378 those expected given the announced Format.
380 @type dsc_filename: string
381 @param dsc_filename: path of .dsc file
384 @param dsc: the content of the .dsc parsed by C{parse_changes()}
386 @type dsc_files: dict
387 @param dsc_files: the file list returned by C{build_file_list()}
390 @return: all errors detected
394 # Ensure .dsc lists proper set of source files according to the format
396 has = defaultdict(lambda: 0)
399 (r'orig.tar.gz', ('orig_tar_gz', 'orig_tar')),
400 (r'diff.gz', ('debian_diff',)),
401 (r'tar.gz', ('native_tar_gz', 'native_tar')),
402 (r'debian\.tar\.(gz|bz2|xz)', ('debian_tar',)),
403 (r'orig\.tar\.(gz|bz2|xz)', ('orig_tar',)),
404 (r'tar\.(gz|bz2|xz)', ('native_tar',)),
405 (r'orig-.+\.tar\.(gz|bz2|xz)', ('more_orig_tar',)),
409 m = re_issource.match(f)
411 rejmsg.append("%s: %s in Files field not recognised as source."
415 # Populate 'has' dictionary by resolving keys in lookup table
417 for regex, keys in ftype_lookup:
418 if re.match(regex, m.group(3)):
424 # File does not match anything in lookup table; reject
426 reject("%s: unexpected source file '%s'" % (dsc_filename, f))
428 # Check for multiple files
429 for file_type in ('orig_tar', 'native_tar', 'debian_tar', 'debian_diff'):
430 if has[file_type] > 1:
431 rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))
433 # Source format specific tests
435 format = get_format_from_string(dsc['format'])
437 '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)
440 except UnknownFormatError:
441 # Not an error here for now
446 ################################################################################
448 def check_hash_fields(what, manifest):
450 check_hash_fields ensures that there are no checksum fields in the
451 given dict that we do not know about.
# Known hash names come from the module-level known_hashes table
# ("sha1", "sha256" at the time of writing).  `what` only labels the
# rejection message.  NOTE(review): rejmsg's initialisation and the
# final return are on elided lines.
455 hashes = map(lambda x: x[0], known_hashes)
456 for field in manifest:
457 if field.startswith("checksums-"):
# Field names are lower-cased by parse_deb822; take the hash name
# after the first '-' only.
458 hashname = field.split("-",1)[1]
459 if hashname not in hashes:
460 rejmsg.append("Unsupported checksum field for %s "\
461 "in %s" % (hashname, what))
464 ################################################################################
466 def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
467 if format >= version:
468 # The version should contain the specified hash.
471 # Import hashes from the changes
472 rejmsg = parse_checksums(".changes", files, changes, hashname)
476 # We need to calculate the hash because it can't possibly
479 return func(".changes", files, hashname, hashfunc)
481 # We could add the orig which might be in the pool to the files dict to
482 # access the checksums easily.
484 def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
486 ensure_dsc_hashes' task is to ensure that each and every *present* hash
487 in the dsc is correct, i.e. identical to the changes file and if necessary
488 the pool. The latter task is delegated to check_hash.
# Note the capitalised 'Checksums-%s' key here versus the lower-cased
# field names used by parse_checksums — the dsc dict is evidently keyed
# differently at this call site.  NOTE(review): rejmsg's initialisation,
# the early-return branch body and the final return are on elided lines.
492 if not dsc.has_key('Checksums-%s' % (hashname,)):
494 # Import hashes from the dsc
495 parse_checksums(".dsc", dsc_files, dsc, hashname)
# check_hash re-reads the files from disk and compares against the
# imported values.
497 rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
500 ################################################################################
502 def parse_checksums(where, files, manifest, hashname):
# Parse the 'checksums-<hashname>' field of `manifest` (one
# "<checksum> <size> <file>" triple per line) and store each checksum
# into files[<file>]['<hashname>sum'].  Also cross-checks that sizes
# agree and that every file in `files` appeared in the checksums field.
# NOTE(review): rejmsg's initialisation, several `continue` lines, the
# try/except around the 3-way unpack and the final return are elided
# from this listing.
504 field = 'checksums-%s' % hashname
505 if not field in manifest:
507 for line in manifest[field].split('\n'):
510 clist = line.strip().split(' ')
512 checksum, size, checkfile = clist
514 rejmsg.append("Cannot parse checksum line [%s]" % (line))
516 if not files.has_key(checkfile):
517 # TODO: check for the file's entry in the original files dict, not
518 # the one modified by (auto)byhand and other weird stuff
519 # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
520 # (file, hashname, where))
# Both sides are strings here (sizes are kept as strings in the files
# dict), so string comparison is the intended semantics.
522 if not files[checkfile]["size"] == size:
523 rejmsg.append("%s: size differs for files and checksums-%s entry "\
524 "in %s" % (checkfile, hashname, where))
526 files[checkfile][hash_key(hashname)] = checksum
# Second pass: every file must have received a checksum entry.
527 for f in files.keys():
528 if not files[f].has_key(hash_key(hashname)):
529 rejmsg.append("%s: no entry in checksums-%s in %s" % (f, hashname, where))
532 ################################################################################
534 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
536 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
539 # Make sure we have a Files: field to parse...
540 if not changes.has_key(field):
541 raise NoFilesFieldError
543 # Validate .changes Format: field
545 validate_changes_format(parse_format(changes['format']), field)
547 includes_section = (not is_a_dsc) and field == "files"
549 # Parse each entry/line:
550 for i in changes[field].split('\n'):
554 section = priority = ""
557 (md5, size, section, priority, name) = s
559 (md5, size, name) = s
561 raise ParseChangesError(i)
568 (section, component) = extract_component_from_section(section)
570 files[name] = dict(size=size, section=section,
571 priority=priority, component=component)
572 files[name][hashname] = md5
576 ################################################################################
578 # see http://bugs.debian.org/619131
579 def build_package_list(dsc, session = None):
580 if not dsc.has_key("package-list"):
585 for line in dsc["package-list"].split("\n"):
589 fields = line.split()
591 package_type = fields[1]
592 (section, component) = extract_component_from_section(fields[2])
595 # Validate type if we have a session
596 if session and get_override_type(package_type, session) is None:
597 # Maybe just warn and ignore? exit(1) might be a bit hard...
598 utils.fubar("invalid type (%s) in Package-List." % (package_type))
600 if name not in packages or packages[name]["type"] == "dsc":
601 packages[name] = dict(priority=priority, section=section, type=package_type, component=component, files=[])
605 ################################################################################
607 def send_mail (message, filename="", whitelists=None):
608 """sendmail wrapper, takes _either_ a message string or a file as arguments
610 @type whitelists: list of (str or None)
611 @param whitelists: path to whitelists. C{None} or an empty list whitelists
612 everything, otherwise an address is whitelisted if it is
613 included in any of the lists.
614 In addition a global whitelist can be specified in
615 Dinstall::MailWhiteList.
618 maildir = Cnf.get('Dir::Mail')
620 path = os.path.join(maildir, datetime.datetime.now().isoformat())
621 path = find_next_free(path)
626 # Check whether we're supposed to be sending mail
627 if Cnf.has_key("Dinstall::Options::No-Mail") and Cnf["Dinstall::Options::No-Mail"]:
630 # If we've been passed a string dump it into a temporary file
632 (fd, filename) = tempfile.mkstemp()
633 os.write (fd, message)
636 if whitelists is None or None in whitelists:
638 if Cnf.get('Dinstall::MailWhiteList', ''):
639 whitelists.append(Cnf['Dinstall::MailWhiteList'])
640 if len(whitelists) != 0:
641 message_in = open_file(filename)
642 message_raw = modemail.message_from_file(message_in)
646 for path in whitelists:
647 with open_file(path, 'r') as whitelist_in:
648 for line in whitelist_in:
649 if not re_whitespace_comment.match(line):
650 if re_re_mark.match(line):
651 whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
653 whitelist.append(re.compile(re.escape(line.strip())))
656 fields = ["To", "Bcc", "Cc"]
659 value = message_raw.get(field, None)
662 for item in value.split(","):
663 (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
669 if not mail_whitelisted:
670 print "Skipping {0} since it's not whitelisted".format(item)
674 # Doesn't have any mail in whitelist so remove the header
676 del message_raw[field]
678 message_raw.replace_header(field, ', '.join(match))
680 # Change message fields in order if we don't have a To header
681 if not message_raw.has_key("To"):
684 if message_raw.has_key(field):
685 message_raw[fields[-1]] = message_raw[field]
686 del message_raw[field]
689 # Clean up any temporary files
690 # and return, as we removed all recipients.
692 os.unlink (filename);
695 fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0o700);
696 os.write (fd, message_raw.as_string(True));
700 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
702 raise SendmailFailedError(output)
704 # Clean up any temporary files
708 ################################################################################
710 def poolify (source, component=None):
# Return the pool sub-directory for a source package name: "lib" packages
# use a four-character prefix ("libfoo" -> "libf/libfoo/"), everything
# else the first letter ("foo" -> "f/foo/").  The `component` parameter
# is unused in the visible lines; line 713 is elided from this listing.
711 if source[:3] == "lib":
712 return source[:4] + '/' + source + '/'
714 return source[:1] + '/' + source + '/'
716 ################################################################################
718 def move (src, dest, overwrite = 0, perms = 0o664):
719 if os.path.exists(dest) and os.path.isdir(dest):
722 dest_dir = os.path.dirname(dest)
723 if not os.path.exists(dest_dir):
724 umask = os.umask(00000)
725 os.makedirs(dest_dir, 0o2775)
727 #print "Moving %s to %s..." % (src, dest)
728 if os.path.exists(dest) and os.path.isdir(dest):
729 dest += '/' + os.path.basename(src)
730 # Don't overwrite unless forced to
731 if os.path.exists(dest):
733 fubar("Can't move %s to %s - file already exists." % (src, dest))
735 if not os.access(dest, os.W_OK):
736 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
737 shutil.copy2(src, dest)
738 os.chmod(dest, perms)
741 def copy (src, dest, overwrite = 0, perms = 0o664):
742 if os.path.exists(dest) and os.path.isdir(dest):
745 dest_dir = os.path.dirname(dest)
746 if not os.path.exists(dest_dir):
747 umask = os.umask(00000)
748 os.makedirs(dest_dir, 0o2775)
750 #print "Copying %s to %s..." % (src, dest)
751 if os.path.exists(dest) and os.path.isdir(dest):
752 dest += '/' + os.path.basename(src)
753 # Don't overwrite unless forced to
754 if os.path.exists(dest):
756 raise FileExistsError
758 if not os.access(dest, os.W_OK):
759 raise CantOverwriteError
760 shutil.copy2(src, dest)
761 os.chmod(dest, perms)
763 ################################################################################
766 res = socket.getfqdn()
767 database_hostname = Cnf.get("Config::" + res + "::DatabaseHostname")
768 if database_hostname:
769 return database_hostname
773 def which_conf_file ():
774 if os.getenv('DAK_CONFIG'):
775 return os.getenv('DAK_CONFIG')
777 res = socket.getfqdn()
778 # In case we allow local config files per user, try if one exists
779 if Cnf.find_b("Config::" + res + "::AllowLocalConfig"):
780 homedir = os.getenv("HOME")
781 confpath = os.path.join(homedir, "/etc/dak.conf")
782 if os.path.exists(confpath):
783 apt_pkg.ReadConfigFileISC(Cnf,confpath)
785 # We are still in here, so there is no local config file or we do
786 # not allow local files. Do the normal stuff.
787 if Cnf.get("Config::" + res + "::DakConfig"):
788 return Cnf["Config::" + res + "::DakConfig"]
790 return default_config
792 def which_apt_conf_file ():
793 res = socket.getfqdn()
794 # In case we allow local config files per user, try if one exists
795 if Cnf.find_b("Config::" + res + "::AllowLocalConfig"):
796 homedir = os.getenv("HOME")
797 confpath = os.path.join(homedir, "/etc/dak.conf")
798 if os.path.exists(confpath):
799 apt_pkg.ReadConfigFileISC(Cnf,default_config)
801 if Cnf.get("Config::" + res + "::AptConfig"):
802 return Cnf["Config::" + res + "::AptConfig"]
804 return default_apt_config
806 def which_alias_file():
807 hostname = socket.getfqdn()
808 aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
809 if os.path.exists(aliasfn):
814 ################################################################################
816 def TemplateSubst(subst_map, filename):
817 """ Perform a substition of template """
818 templatefile = open_file(filename)
819 template = templatefile.read()
820 for k, v in subst_map.iteritems():
821 template = template.replace(k, str(v))
825 ################################################################################
827 def fubar(msg, exit_code=1):
828 sys.stderr.write("E: %s\n" % (msg))
832 sys.stderr.write("W: %s\n" % (msg))
834 ################################################################################
836 # Returns the user name with a laughable attempt at rfc822 conformancy
837 # (read: removing stray periods).
839 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
842 return pwd.getpwuid(os.getuid())[0]
844 ################################################################################
854 return ("%d%s" % (c, t))
856 ################################################################################
858 def cc_fix_changes (changes):
859 o = changes.get("architecture", "")
861 del changes["architecture"]
862 changes["architecture"] = {}
864 changes["architecture"][j] = 1
866 def changes_compare (a, b):
867 """ Sort by source name, source version, 'have source', and then by filename """
869 a_changes = parse_changes(a)
874 b_changes = parse_changes(b)
878 cc_fix_changes (a_changes)
879 cc_fix_changes (b_changes)
881 # Sort by source name
882 a_source = a_changes.get("source")
883 b_source = b_changes.get("source")
884 q = cmp (a_source, b_source)
888 # Sort by source version
889 a_version = a_changes.get("version", "0")
890 b_version = b_changes.get("version", "0")
891 q = apt_pkg.version_compare(a_version, b_version)
895 # Sort by 'have source'
896 a_has_source = a_changes["architecture"].get("source")
897 b_has_source = b_changes["architecture"].get("source")
898 if a_has_source and not b_has_source:
900 elif b_has_source and not a_has_source:
903 # Fall back to sort by filename
906 ################################################################################
908 def find_next_free (dest, too_many=100):
# Find a non-existing filename by appending '.1', '.2', ... to `dest`,
# giving up after `too_many` attempts with NoFreeFilenameError.
# NOTE(review): the initialisation of `extra`/`orig_dest`, the counter
# increment and the final return are on elided lines.
911 while os.path.exists(dest) and extra < too_many:
912 dest = orig_dest + '.' + repr(extra)
914 if extra >= too_many:
915 raise NoFreeFilenameError
918 ################################################################################
920 def result_join (original, sep = '\t'):
922 for i in xrange(len(original)):
923 if original[i] == None:
924 resultlist.append("")
926 resultlist.append(original[i])
927 return sep.join(resultlist)
929 ################################################################################
931 def prefix_multi_line_string(str, prefix, include_blank_lines=0):
933 for line in str.split('\n'):
935 if line or include_blank_lines:
936 out += "%s%s\n" % (prefix, line)
937 # Strip trailing new line
942 ################################################################################
944 def validate_changes_file_arg(filename, require_changes=1):
946 'filename' is either a .changes or .dak file. If 'filename' is a
947 .dak file, it's changed to be the corresponding .changes file. The
948 function then checks if the .changes file a) exists and b) is
949 readable and returns the .changes filename if so. If there's a
950 problem, the next action depends on the option 'require_changes'
953 - If 'require_changes' == -1, errors are ignored and the .changes
954 filename is returned.
955 - If 'require_changes' == 0, a warning is given and 'None' is returned.
956 - If 'require_changes' == 1, a fatal error is raised.
961 orig_filename = filename
962 if filename.endswith(".dak"):
963 filename = filename[:-4]+".changes"
965 if not filename.endswith(".changes"):
966 error = "invalid file type; not a changes file"
968 if not os.access(filename,os.R_OK):
969 if os.path.exists(filename):
970 error = "permission denied"
972 error = "file not found"
975 if require_changes == 1:
976 fubar("%s: %s." % (orig_filename, error))
977 elif require_changes == 0:
978 warn("Skipping %s - %s" % (orig_filename, error))
980 else: # We only care about the .dak file
985 ################################################################################
988 return (arch != "source" and arch != "all")
990 ################################################################################
def join_with_commas_and(list):
    """Render a sequence as English prose: 'a, b and c'.

    Returns the literal string "nothing" for an empty sequence.
    (The parameter name shadows the builtin but is kept for
    backward compatibility with keyword callers.)
    """
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    return "%s and %s" % (", ".join(list[:-1]), list[-1])
997 ################################################################################
1002 (pkg, version, constraint) = atom
1004 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
1007 pp_deps.append(pp_dep)
1008 return " |".join(pp_deps)
1010 ################################################################################
1015 ################################################################################
1017 def parse_args(Options):
1018 """ Handle -a, -c and -s arguments; returns them as SQL constraints """
1019 # XXX: This should go away and everything which calls it be converted
1020 # to use SQLA properly. For now, we'll just fix it not to use
1021 # the old Pg interface though
1022 session = DBConn().session()
1024 if Options["Suite"]:
1026 for suitename in split_args(Options["Suite"]):
1027 suite = get_suite(suitename, session=session)
1028 if not suite or suite.suite_id is None:
1029 warn("suite '%s' not recognised." % (suite and suite.suite_name or suitename))
1031 suite_ids_list.append(suite.suite_id)
1033 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
1035 fubar("No valid suite given.")
1040 if Options["Component"]:
1041 component_ids_list = []
1042 for componentname in split_args(Options["Component"]):
1043 component = get_component(componentname, session=session)
1044 if component is None:
1045 warn("component '%s' not recognised." % (componentname))
1047 component_ids_list.append(component.component_id)
1048 if component_ids_list:
1049 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
1051 fubar("No valid component given.")
1055 # Process architecture
1056 con_architectures = ""
1058 if Options["Architecture"]:
1060 for archname in split_args(Options["Architecture"]):
1061 if archname == "source":
1064 arch = get_architecture(archname, session=session)
1066 warn("architecture '%s' not recognised." % (archname))
1068 arch_ids_list.append(arch.arch_id)
1070 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
1072 if not check_source:
1073 fubar("No valid architecture given.")
1077 return (con_suites, con_architectures, con_components, check_source)
1079 ################################################################################
1081 def arch_compare_sw (a, b):
1083 Function for use in sorting lists of architectures.
1085 Sorts normally except that 'source' dominates all others.
1088 if a == "source" and b == "source":
1097 ################################################################################
1099 def split_args (s, dwim=1):
1101 Split command line arguments which can be separated by either commas
1102 or whitespace. If dwim is set, it will complain about string ending
1103 in comma since this usually means someone did 'dak ls -a i386, m68k
1104 foo' or something and the inevitable confusion resulting from 'm68k'
1105 being treated as an argument is undesirable.
# NOTE(review): both return statements are on elided lines — the
# no-comma branch presumably splits on whitespace and the comma branch
# on ','; only the guard logic is visible here.
1108 if s.find(",") == -1:
# `s[-1:]` rather than `s[-1]` avoids an IndexError on the empty string.
1111 if s[-1:] == "," and dwim:
1112 fubar("split_args: found trailing comma, spurious space maybe?")
1115 ################################################################################
1117 def gpgv_get_status_output(cmd, status_read, status_write):
1119 Our very own version of commands.getstatusoutput(), hacked to support
# Runs `cmd` through '/bin/sh -c' in a forked child while keeping the
# caller-supplied status pipe (gpgv's --status-fd) open across the
# exec; the parent multiplexes child stdout/stderr and the status fd
# with select().  NOTE(review): the fork, several os.close/dup2 lines,
# the read-loop scaffolding and the fd bookkeeping are elided from this
# listing.
1123 cmd = ['/bin/sh', '-c', cmd]
1124 p2cread, p2cwrite = os.pipe()
1125 c2pread, c2pwrite = os.pipe()
1126 errout, errin = os.pipe()
# Child: close every inherited descriptor except the status pipe so the
# exec'd process only sees the fds we intend it to have.
1136 for i in range(3, 256):
1137 if i != status_write:
1143 os.execvp(cmd[0], cmd)
1149 os.dup2(c2pread, c2pwrite)
1150 os.dup2(errout, errin)
1152 output = status = ""
1154 i, o, e = select.select([c2pwrite, errin, status_read], [], [])
# NOTE(review): 8196 looks like a typo for the conventional 8192 buffer
# size; harmless either way, but worth confirming before "fixing".
1157 r = os.read(fd, 8196)
1159 more_data.append(fd)
# Child stdout and stderr are merged into `output`; --status-fd data is
# accumulated separately in `status`.
1160 if fd == c2pwrite or fd == errin:
1162 elif fd == status_read:
1165 fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
1167 pid, exit_status = os.waitpid(pid, 0)
1169 os.close(status_write)
1170 os.close(status_read)
1180 return output, status, exit_status
1182 ################################################################################
1184 def process_gpgv_output(status):
1185 # Process the status-fd output
1188 for line in status.split('\n'):
1192 split = line.split()
1194 internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
1196 (gnupg, keyword) = split[:2]
1197 if gnupg != "[GNUPG:]":
1198 internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
1201 if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
1202 internal_error += "found duplicate status token ('%s').\n" % (keyword)
1205 keywords[keyword] = args
1207 return (keywords, internal_error)
1209 ################################################################################
1211 def retrieve_key (filename, keyserver=None, keyring=None):
1213 Retrieve the key that signed 'filename' from 'keyserver' and
1214 add it to 'keyring'. Returns nothing on success, or an error message
1218 # Defaults for keyserver and keyring
1220 keyserver = Cnf["Dinstall::KeyServer"]
1222 keyring = get_primary_keyring_path()
1224 # Ensure the filename contains no shell meta-characters or other badness
1225 if not re_taint_free.match(filename):
1226 return "%s: tainted filename" % (filename)
1228 # Invoke gpgv on the file
1229 status_read, status_write = os.pipe()
1230 cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1231 (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1233 # Process the status-fd output
1234 (keywords, internal_error) = process_gpgv_output(status)
1236 return internal_error
1238 if not keywords.has_key("NO_PUBKEY"):
1239 return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1241 fingerprint = keywords["NO_PUBKEY"][0]
1242 # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
1243 # it'll try to create a lockfile in /dev. A better solution might
1244 # be a tempfile or something.
1245 cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1246 % (Cnf["Dinstall::SigningKeyring"])
1247 cmd += " --keyring %s --keyserver %s --recv-key %s" \
1248 % (keyring, keyserver, fingerprint)
1249 (result, output) = commands.getstatusoutput(cmd)
1251 return "'%s' failed with exit code %s" % (cmd, result)
1255 ################################################################################
1257 def gpg_keyring_args(keyrings=None):
# Build the '--keyring <path>' argument string for a gpg/gpgv command
# line.  NOTE(review): the guard on line 1258 is elided — presumably
# the active-keyring lookup below only runs when `keyrings` is not
# supplied by the caller.
1259 keyrings = get_active_keyring_paths()
1261 return " ".join(["--keyring %s" % x for x in keyrings])
1263 ################################################################################
def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None, session=None):
    """
    Check the signature of a file and return a tuple (fingerprint, rejects).
    'fingerprint' is the signing key's fingerprint if the signature is valid,
    or None if it is not; 'rejects' is a list of human-readable reject
    messages (empty on success).

    @type sig_filename: str
    @param sig_filename: filename whose signature should be checked

    @type data_filename: str
    @param data_filename: name of the file a detached signature applies to

    @type keyrings: list of str
    @param keyrings: keyrings to use; if empty, the active keyrings from the
                     database session are used

    @param autofetch: None, True or False.  If None, the default behaviour
                      specified in the config (Dinstall::KeyAutoFetch) is used.

    @rtype: tuple
    @return: (fingerprint or None, list of reject messages)
    """

    rejects = []

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(sig_filename):
        rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
        return (None, rejects)

    if data_filename and not re_taint_free.match(data_filename):
        rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
        return (None, rejects)

    if not keyrings:
        # NOTE: SQLAlchemy requires '== True' here, not 'is True'.
        keyrings = [ x.keyring_name for x in session.query(Keyring).filter(Keyring.active == True).all() ]

    # Autofetch the signing key if that's enabled
    if autofetch is None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    if autofetch:
        error_msg = retrieve_key(sig_filename)
        if error_msg:
            rejects.append(error_msg)
            return (None, rejects)

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    if internal_error:
        rejects.append("internal error while performing signature check on %s." % (sig_filename))
        # BUGFIX: list.append() takes exactly one argument; the old
        # two-argument calls (message, "") raised TypeError whenever this
        # error path was actually taken.
        rejects.append(internal_error)
        rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.")
        return (None, rejects)

    # Now check for obviously bad things in the processed output
    if "KEYREVOKED" in keywords:
        rejects.append("The key used to sign %s has been revoked." % (sig_filename))
    if "BADSIG" in keywords:
        rejects.append("bad signature on %s." % (sig_filename))
    if "ERRSIG" in keywords and "NO_PUBKEY" not in keywords:
        rejects.append("failed to check signature on %s." % (sig_filename))
    if "NO_PUBKEY" in keywords:
        args = keywords["NO_PUBKEY"]
        key = "UNKNOWN"
        if len(args) >= 1:
            key = args[0]
        rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if "BADARMOR" in keywords:
        rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if "NODATA" in keywords:
        rejects.append("no signature found in %s." % (sig_filename))
    if "EXPKEYSIG" in keywords:
        args = keywords["EXPKEYSIG"]
        key = "UNKNOWN"
        if len(args) >= 1:
            key = args[0]
        rejects.append("Signature made by expired key 0x%s" % (key))
    if "KEYEXPIRED" in keywords and "GOODSIG" not in keywords:
        args = keywords["KEYEXPIRED"]
        expiredate = ""
        if len(args) >= 1:
            timestamp = args[0]
            # gpgv emits either an epoch timestamp or an ISO date with a 'T'
            if timestamp.count("T") == 0:
                try:
                    expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
                except ValueError:
                    expiredate = "unknown (%s)" % (timestamp)
            else:
                expiredate = timestamp
        rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))

    if len(rejects) > 0:
        return (None, rejects)

    # Next check gpgv exited with a zero return code
    if exit_status:
        rejects.append("gpgv failed while checking %s." % (sig_filename))
        if status.strip():
            rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "))
        else:
            rejects.append(prefix_multi_line_string(output, " [GPG output:] "))
        return (None, rejects)

    # Sanity check the good stuff we expect
    if "VALIDSIG" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
    else:
        args = keywords["VALIDSIG"]
        if len(args) < 1:
            rejects.append("internal error while checking signature on %s." % (sig_filename))
        else:
            fingerprint = args[0]
    if "GOODSIG" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if "SIG_ID" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

    # Finally ensure there's not something we don't recognise
    known_keywords = dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
                          NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="",POLICY_URL="")

    for keyword in keywords.keys():
        if keyword not in known_keywords:
            rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))

    if len(rejects) > 0:
        return (None, rejects)

    return (fingerprint, [])
1397 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """retrieve email addresses from gpg key uids for a given fingerprint"""
    # Serve from the module-level cache when possible.
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses is not None:
        return addresses
    addresses = []
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
          % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    if result == 0:
        for l in output.split('\n'):
            m = re_gpg_uid.match(l)
            if not m:
                continue
            address = m.group(1)
            if address.endswith('@debian.org'):
                # prefer @debian.org addresses
                # TODO: maybe not hardcode the domain
                addresses.insert(0, address)
            else:
                addresses.append(m.group(1))
    # Cache even a failed/empty lookup so we don't re-run gpg for it.
    key_uid_email_cache[fingerprint] = addresses
    return addresses
1423 ################################################################################
def get_logins_from_ldap(fingerprint='*'):
    """retrieve login from LDAP linked to a given fingerprint"""

    # Imported lazily so the module works without python-ldap installed
    # unless this function is actually used.
    import ldap
    LDAPDn = Cnf['Import-LDAP-Fingerprints::LDAPDn']
    LDAPServer = Cnf['Import-LDAP-Fingerprints::LDAPServer']
    l = ldap.open(LDAPServer)
    # Anonymous bind is sufficient for the read-only search below.
    l.simple_bind_s('','')
    Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
                       '(keyfingerprint=%s)' % fingerprint,
                       ['uid', 'keyfingerprint'])
    login = {}
    for elem in Attrs:
        login[elem[1]['keyFingerPrint'][0]] = elem[1]['uid'][0]
    return login
1440 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
    Returns fixed 'src'.
    """
    # Strip the archive root from both ends of the link, then climb up one
    # '../' for every directory component of the (root-relative) destination.
    src = src.replace(root, '', 1)
    dest = dest.replace(root, '', 1)
    dest = os.path.dirname(dest)
    new_src = '../' * len(dest.split('/'))
    return new_src + src
1453 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix="", mode=None, group=None):
    """
    Return a secure and unique filename by pre-creating it.

    @type directory: str
    @param directory: If non-null it will be the directory the file is pre-created in.

    @type prefix: str
    @param prefix: The filename will be prefixed with this string

    @type suffix: str
    @param suffix: The filename will end with this string

    @type mode: int
    @param mode: If set the file will get chmodded to those permissions

    @type group: str
    @param group: If set the file will get chgrped to the specified group.

    @rtype: list
    @return: Returns a pair (fd, name)
    """
    (tfd, tfname) = tempfile.mkstemp(suffix, prefix, directory)
    if mode:
        os.chmod(tfname, mode)
    if group:
        gid = grp.getgrnam(group).gr_gid
        os.chown(tfname, -1, gid)
    return (tfd, tfname)
1486 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix="", mode=None, group=None):
    """
    Return a secure and unique directory by pre-creating it.

    @type parent: str
    @param parent: If non-null it will be the directory the directory is pre-created in.

    @type prefix: str
    @param prefix: The filename will be prefixed with this string

    @type suffix: str
    @param suffix: The filename will end with this string

    @type mode: int
    @param mode: If set the file will get chmodded to those permissions

    @type group: str
    @param group: If set the file will get chgrped to the specified group.

    @rtype: str
    @return: the path of the newly created directory
    """
    tfname = tempfile.mkdtemp(suffix, prefix, parent)
    if mode:
        os.chmod(tfname, mode)
    if group:
        gid = grp.getgrnam(group).gr_gid
        os.chown(tfname, -1, gid)
    return tfname
1520 ################################################################################
def is_email_alias(email):
    """ checks if the user part of the email is listed in the alias file """
    global alias_cache
    # Populate the module-level cache on first use.
    if alias_cache is None:
        aliasfn = which_alias_file()
        alias_cache = set()
        if aliasfn:
            # Alias file uses the classic 'name: target' format; only the
            # name part matters here.
            for l in open(aliasfn):
                alias_cache.add(l.split(':')[0])
    uid = email.split('@')[0]
    return uid in alias_cache
1534 ################################################################################
def get_changes_files(from_dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place)

    Returns a list of filenames
    """
    try:
        # Much of the rest of p-u/p-a depends on being in the right place
        os.chdir(from_dir)
        changes_files = [x for x in os.listdir(from_dir) if x.endswith('.changes')]
    except OSError as e:
        fubar("Failed to read list from directory %s (%s)" % (from_dir, e))

    return changes_files
1553 ################################################################################
# Module-wide dak configuration: the apt_pkg-style Cnf tree loaded by
# daklib.config.Config (e.g. the Dinstall::* keys read throughout this file).
Cnf = config.Config().Cnf
1557 ################################################################################
1559 def parse_wnpp_bug_file(file = "/srv/ftp-master.debian.org/scripts/masterfiles/wnpp_rm"):
1561 Parses the wnpp bug list available at http://qa.debian.org/data/bts/wnpp_rm
1562 Well, actually it parsed a local copy, but let's document the source
1565 returns a dict associating source package name with a list of open wnpp
1566 bugs (Yes, there might be more than one)
1572 lines = f.readlines()
1573 except IOError as e:
1574 print "Warning: Couldn't open %s; don't know about WNPP bugs, so won't close any." % file
1579 splited_line = line.split(": ", 1)
1580 if len(splited_line) > 1:
1581 wnpp[splited_line[0]] = splited_line[1].split("|")
1583 for source in wnpp.keys():
1585 for wnpp_bug in wnpp[source]:
1586 bug_no = re.search("(\d)+", wnpp_bug).group()
1592 ################################################################################
def get_packages_from_ftp(root, suite, component, architecture):
    """
    Returns an object containing apt_pkg-parseable data collected by
    aggregating Packages.gz files gathered for each architecture.

    @type root: string
    @param root: path to ftp archive root directory

    @type suite: string
    @param suite: suite to extract files from

    @type component: string
    @param component: component to extract files from

    @type architecture: string
    @param architecture: architecture to extract files from

    @rtype: TagFile
    @return: apt_pkg class containing package data
    """
    filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (root, suite, component, architecture)
    (fd, temp_file) = temp_filename()
    # BUGFIX: close the pre-created fd so it doesn't leak; the shell
    # redirection below writes via the filename, not this descriptor.
    os.close(fd)
    (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_file))
    if result != 0:
        fubar("Gunzip invocation failed!\n%s\n" % (output), result)
    # Append the debian-installer packages as well, when present.
    filename = "%s/dists/%s/%s/debian-installer/binary-%s/Packages.gz" % (root, suite, component, architecture)
    if os.path.exists(filename):
        (result, output) = commands.getstatusoutput("gunzip -c %s >> %s" % (filename, temp_file))
        if result != 0:
            fubar("Gunzip invocation failed!\n%s\n" % (output), result)
    packages = open_file(temp_file)
    Packages = apt_pkg.ParseTagFile(packages)
    os.unlink(temp_file)
    return Packages
1629 ################################################################################
def deb_extract_control(fh):
    """extract DEBIAN/control from a binary package"""
    deb = apt_inst.DebFile(fh)
    return deb.control.extractdata("control")
1635 ################################################################################
def mail_addresses_for_upload(maintainer, changed_by, fingerprint):
    """mail addresses to contact for an upload

    @type maintainer: str
    @param maintainer: Maintainer field of the .changes file

    @type changed_by: str
    @param changed_by: Changed-By field of the .changes file

    @type fingerprint: str
    @param fingerprint: fingerprint of the key used to sign the upload

    @rtype: list of str
    @return: list of RFC 2047-encoded mail addresses to contact regarding
             the upload
    """
    addresses = [maintainer]
    if changed_by != maintainer:
        addresses.append(changed_by)

    # If the signing key lists addresses and neither Maintainer nor
    # Changed-By matches one of them, also notify the signer.
    fpr_addresses = gpg_get_key_addresses(fingerprint)
    if len(fpr_addresses) > 0 and fix_maintainer(changed_by)[3] not in fpr_addresses and fix_maintainer(maintainer)[3] not in fpr_addresses:
        addresses.append(fpr_addresses[0])

    encoded_addresses = [ fix_maintainer(e)[1] for e in addresses ]
    return encoded_addresses
1664 ################################################################################
def call_editor(text="", suffix=".txt"):
    """run editor and return the result as a string

    @type text: str
    @param text: initial text

    @type suffix: str
    @param suffix: extension for temporary file

    @rtype: str
    @return: string with the edited text
    """
    editor = os.environ.get('VISUAL', os.environ.get('EDITOR', 'vi'))
    tmp = tempfile.NamedTemporaryFile(suffix=suffix, delete=False)
    try:
        tmp.write(text)
        tmp.close()
        subprocess.check_call([editor, tmp.name])
        return open(tmp.name, 'r').read()
    finally:
        # BUGFIX: with delete=False the temporary file must be removed
        # explicitly, even when the editor exits with an error.
        os.unlink(tmp.name)
1688 ################################################################################
1690 def check_reverse_depends(removals, suite, arches=None, session=None, cruft=False):
1691 dbsuite = get_suite(suite, session)
1692 overridesuite = dbsuite
1693 if dbsuite.overridesuite is not None:
1694 overridesuite = get_suite(dbsuite.overridesuite, session)
1699 all_arches = set(arches)
1701 all_arches = set([x.arch_string for x in get_suite_architectures(suite)])
1702 all_arches -= set(["source", "all"])
1703 metakey_d = get_or_set_metadatakey("Depends", session)
1704 metakey_p = get_or_set_metadatakey("Provides", session)
1706 'suite_id': dbsuite.suite_id,
1707 'metakey_d_id': metakey_d.key_id,
1708 'metakey_p_id': metakey_p.key_id,
1710 for architecture in all_arches | set(['all']):
1713 virtual_packages = {}
1714 params['arch_id'] = get_architecture(architecture, session).arch_id
1717 SELECT b.id, b.package, s.source, c.name as component,
1718 (SELECT bmd.value FROM binaries_metadata bmd WHERE bmd.bin_id = b.id AND bmd.key_id = :metakey_d_id) AS depends,
1719 (SELECT bmp.value FROM binaries_metadata bmp WHERE bmp.bin_id = b.id AND bmp.key_id = :metakey_p_id) AS provides
1721 JOIN bin_associations ba ON b.id = ba.bin AND ba.suite = :suite_id
1722 JOIN source s ON b.source = s.id
1723 JOIN files_archive_map af ON b.file = af.file_id
1724 JOIN component c ON af.component_id = c.id
1725 WHERE b.architecture = :arch_id'''
1726 query = session.query('id', 'package', 'source', 'component', 'depends', 'provides'). \
1727 from_statement(statement).params(params)
1728 for binary_id, package, source, component, depends, provides in query:
1729 sources[package] = source
1730 p2c[package] = component
1731 if depends is not None:
1732 deps[package] = depends
1733 # Maintain a counter for each virtual package. If a
1734 # Provides: exists, set the counter to 0 and count all
1735 # provides by a package not in the list for removal.
1736 # If the counter stays 0 at the end, we know that only
1737 # the to-be-removed packages provided this virtual
1739 if provides is not None:
1740 for virtual_pkg in provides.split(","):
1741 virtual_pkg = virtual_pkg.strip()
1742 if virtual_pkg == package: continue
1743 if not virtual_packages.has_key(virtual_pkg):
1744 virtual_packages[virtual_pkg] = 0
1745 if package not in removals:
1746 virtual_packages[virtual_pkg] += 1
1748 # If a virtual package is only provided by the to-be-removed
1749 # packages, treat the virtual package as to-be-removed too.
1750 for virtual_pkg in virtual_packages.keys():
1751 if virtual_packages[virtual_pkg] == 0:
1752 removals.append(virtual_pkg)
1754 # Check binary dependencies (Depends)
1755 for package in deps.keys():
1756 if package in removals: continue
1759 parsed_dep += apt_pkg.ParseDepends(deps[package])
1760 except ValueError as e:
1761 print "Error for package %s: %s" % (package, e)
1762 for dep in parsed_dep:
1763 # Check for partial breakage. If a package has a ORed
1764 # dependency, there is only a dependency problem if all
1765 # packages in the ORed depends will be removed.
1767 for dep_package, _, _ in dep:
1768 if dep_package in removals:
1770 if unsat == len(dep):
1771 component = p2c[package]
1772 source = sources[package]
1773 if component != "main":
1774 source = "%s/%s" % (source, component)
1775 all_broken.setdefault(source, {}).setdefault(package, set()).add(architecture)
1780 print " - broken Depends:"
1782 print "# Broken Depends:"
1783 for source, bindict in sorted(all_broken.items()):
1785 for binary, arches in sorted(bindict.items()):
1786 if arches == all_arches or 'all' in arches:
1787 lines.append(binary)
1789 lines.append('%s [%s]' % (binary, ' '.join(sorted(arches))))
1791 print ' %s: %s' % (source, lines[0])
1793 print '%s: %s' % (source, lines[0])
1794 for line in lines[1:]:
1796 print ' ' + ' ' * (len(source) + 2) + line
1798 print ' ' * (len(source) + 2) + line
1802 # Check source dependencies (Build-Depends and Build-Depends-Indep)
1804 metakey_bd = get_or_set_metadatakey("Build-Depends", session)
1805 metakey_bdi = get_or_set_metadatakey("Build-Depends-Indep", session)
1807 'suite_id': dbsuite.suite_id,
1808 'metakey_ids': (metakey_bd.key_id, metakey_bdi.key_id),
1811 SELECT s.id, s.source, string_agg(sm.value, ', ') as build_dep
1813 JOIN source_metadata sm ON s.id = sm.src_id
1815 (SELECT source FROM src_associations
1816 WHERE suite = :suite_id)
1817 AND sm.key_id in :metakey_ids
1818 GROUP BY s.id, s.source'''
1819 query = session.query('id', 'source', 'build_dep').from_statement(statement). \
1821 for source_id, source, build_dep in query:
1822 if source in removals: continue
1824 if build_dep is not None:
1825 # Remove [arch] information since we want to see breakage on all arches
1826 build_dep = re_build_dep_arch.sub("", build_dep)
1828 parsed_dep += apt_pkg.ParseDepends(build_dep)
1829 except ValueError as e:
1830 print "Error for source %s: %s" % (source, e)
1831 for dep in parsed_dep:
1833 for dep_package, _, _ in dep:
1834 if dep_package in removals:
1836 if unsat == len(dep):
1837 component, = session.query(Component.component_name) \
1838 .join(Component.overrides) \
1839 .filter(Override.suite == overridesuite) \
1840 .filter(Override.package == re.sub('/(contrib|non-free)$', '', source)) \
1841 .join(Override.overridetype).filter(OverrideType.overridetype == 'dsc') \
1843 if component != "main":
1844 source = "%s/%s" % (source, component)
1845 all_broken.setdefault(source, set()).add(pp_deps(dep))
1850 print " - broken Build-Depends:"
1852 print "# Broken Build-Depends:"
1853 for source, bdeps in sorted(all_broken.items()):
1854 bdeps = sorted(bdeps)
1856 print ' %s: %s' % (source, bdeps[0])
1858 print '%s: %s' % (source, bdeps[0])
1859 for bdep in bdeps[1:]:
1861 print ' ' + ' ' * (len(source) + 2) + bdep
1863 print ' ' * (len(source) + 2) + bdep