#!/usr/bin/env python
# vim:set et ts=4 sw=4:

"""Utility functions

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
@license: GNU General Public License version 2 or later
"""

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

# Standard library and apt modules used by the helpers below.
import commands
import datetime
import email as modemail
import errno
import grp
import os
import pwd
import re
import select
import shutil
import socket
import stat
import subprocess
import sys
import tempfile
import time

import apt_inst
import apt_pkg
import ldap

import daklib.config as config
import daklib.daksubprocess
from dbconn import DBConn, get_architecture, get_component, get_suite, \
                   get_override_type, Keyring, session_wrapper, \
                   get_active_keyring_paths, get_primary_keyring_path, \
                   get_suite_architectures, get_or_set_metadatakey, DBSource, \
                   Component, Override, OverrideType
from sqlalchemy import desc
from dak_exceptions import *
from gpg import SignedFile
from textutils import fix_maintainer
from regexes import re_html_escaping, html_escaping, re_single_line_field, \
                    re_multi_line_field, re_srchasver, re_taint_free, \
                    re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource, \
                    re_is_orig_source, re_build_dep_arch

from formats import parse_format, validate_changes_format
from srcformats import get_format_from_string
from collections import defaultdict

################################################################################

default_config = "/etc/dak/dak.conf"     #: default dak config, defines host properties

alias_cache = None        #: Cache for email alias checks
key_uid_email_cache = {}  #: Cache for email addresses from gpg key uids

# (hashname, function, earliest_changes_version)
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
                ("sha256", apt_pkg.sha256sum, (1, 8))]  #: hashes we accept for entries in .changes/.dsc

# Monkeypatch commands.getstatusoutput as it may not return the correct exit
# code in lenny's Python. This also affects commands.getoutput and
# commands.getstatus.
def dak_getstatusoutput(cmd):
    pipe = daklib.daksubprocess.Popen(cmd, shell=True, universal_newlines=True,
        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

    output = pipe.stdout.read()

    if output[-1:] == '\n':
        output = output[:-1]

    ret = pipe.wait()
    if ret is None:
        ret = 0

    return ret, output

commands.getstatusoutput = dak_getstatusoutput

################################################################################

def html_escape(s):
    """ Escape html chars """
    return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)

################################################################################

def open_file(filename, mode='r'):
    """
    Open C{filename}, return fileobject.

    @type filename: string
    @param filename: path/filename to open

    @type mode: string
    @param mode: open mode

    @rtype: fileobject
    @return: open fileobject

    @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
    """
    try:
        f = open(filename, mode)
    except IOError:
        raise CantOpenError(filename)
    return f

################################################################################

def our_raw_input(prompt=""):
    if prompt:
        sys.stdout.write(prompt)
    sys.stdout.flush()
    try:
        ret = raw_input()
        return ret
    except EOFError:
        sys.stderr.write("\nUser interrupt (^D).\n")
        raise SystemExit

################################################################################

def extract_component_from_section(section, session=None):
    component = ""

    if section.find('/') != -1:
        component = section.split('/')[0]

    # Expand default component
    if component == "":
        comp = get_component(section, session)
        if comp is None:
            component = "main"
        else:
            component = comp.component_name

    return (section, component)
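
# Illustrative example (values are hypothetical); sections containing a '/'
# are split on it, anything else is expanded via the component table:
#   >>> extract_component_from_section("non-free/libs")
#   ('non-free/libs', 'non-free')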

################################################################################

def parse_deb822(armored_contents, signing_rules=0, keyrings=None, session=None):
    require_signature = True
    if keyrings is None:
        keyrings = []
        require_signature = False

    signed_file = SignedFile(armored_contents, keyrings=keyrings, require_signature=require_signature)
    contents = signed_file.contents

    error = ""
    changes = {}

    # Split the lines in the input, keeping the linebreaks.
    lines = contents.splitlines(True)

    if len(lines) == 0:
        raise ParseChangesError("[Empty changes file]")

    # Reindex by line number so we can easily verify the format of
    # .dsc files...
    index = 0
    indexed_lines = {}
    for line in lines:
        index += 1
        indexed_lines[index] = line[:-1]

    num_of_lines = len(indexed_lines.keys())
    index = 0
    first = -1
    while index < num_of_lines:
        index += 1
        line = indexed_lines[index]
        if line == "" and signing_rules == 1:
            if index != num_of_lines:
                raise InvalidDscError(index)
            break
        slf = re_single_line_field.match(line)
        if slf:
            field = slf.groups()[0].lower()
            changes[field] = slf.groups()[1]
            first = 1
            continue
        if line == " .":
            changes[field] += '\n'
            continue
        mlf = re_multi_line_field.match(line)
        if mlf:
            if first == -1:
                raise ParseChangesError("'%s'\n [Multi-line field continuing on from nothing?]" % (line))
            if first == 1 and changes[field] != "":
                changes[field] += '\n'
            first = 0
            changes[field] += mlf.groups()[0] + '\n'
            continue
        error += line

    changes["filecontents"] = armored_contents

    if changes.has_key("source"):
        # Strip the source version in brackets from the source field,
        # put it in the "source-version" field instead.
        srcver = re_srchasver.search(changes["source"])
        if srcver:
            changes["source"] = srcver.group(1)
            changes["source-version"] = srcver.group(2)

    if error:
        raise ParseChangesError(error)

    return changes

################################################################################

def parse_changes(filename, signing_rules=0, dsc_file=0, keyrings=None):
    """
    Parses a changes file and returns a dictionary where each field is a
    key.  The mandatory first argument is the filename of the .changes
    file.

    signing_rules is an optional argument:

      - If signing_rules == -1, no signature is required.
      - If signing_rules == 0 (the default), a signature is required.
      - If signing_rules == 1, it turns on the same strict format checking
        as dpkg-source.

    The rules for (signing_rules == 1)-mode are:

      - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
        followed by any PGP header data and must end with a blank line.

      - The data section must end with a blank line and must be followed by
        "-----BEGIN PGP SIGNATURE-----".
    """

    with open_file(filename) as changes_in:
        content = changes_in.read()
    try:
        unicode(content, 'utf-8')
    except UnicodeError:
        raise ChangesUnicodeError("Changes file not proper utf-8")
    changes = parse_deb822(content, signing_rules, keyrings=keyrings)

    if not dsc_file:
        # Finally ensure that everything needed for .changes is there
        must_keywords = ('Format', 'Date', 'Source', 'Binary', 'Architecture', 'Version',
                         'Distribution', 'Maintainer', 'Description', 'Changes', 'Files')

        missingfields = []
        for keyword in must_keywords:
            if not changes.has_key(keyword.lower()):
                missingfields.append(keyword)

        if len(missingfields):
            raise ParseChangesError("Missing mandatory field(s) in changes file (policy 5.5): %s" % (missingfields))

    return changes
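
# Illustrative usage (hypothetical filename; a real, readable .changes file
# is needed, and signing_rules=-1 skips the signature requirement):
#   changes = parse_changes("dak_1.0-1_amd64.changes", signing_rules=-1)
#   print "%s %s" % (changes["source"], changes["version"])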

################################################################################

def hash_key(hashname):
    return '%ssum' % hashname

################################################################################

def create_hash(where, files, hashname, hashfunc):
    """
    create_hash extends the passed files dict with the given hash by
    iterating over all files on disk and passing them to the hashing
    function given.
    """

    rejmsg = []
    for f in files.keys():
        try:
            file_handle = open_file(f)
        except CantOpenError:
            rejmsg.append("Could not open file %s for checksumming" % (f))
            continue

        files[f][hash_key(hashname)] = hashfunc(file_handle)

        file_handle.close()
    return rejmsg

################################################################################

def check_hash(where, files, hashname, hashfunc):
    """
    check_hash checks the given hash in the files dict against the actual
    files on disk.  The hash values need to be present consistently in
    all file entries.  It does not modify its input in any way.
    """

    rejmsg = []
    for f in files.keys():
        try:
            with open_file(f) as file_handle:
                # Check for the hash entry, to not trigger a KeyError.
                if not files[f].has_key(hash_key(hashname)):
                    rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
                        where))
                else:
                    # Actually check the hash for correctness.
                    if hashfunc(file_handle) != files[f][hash_key(hashname)]:
                        rejmsg.append("%s: %s check failed in %s" % (f, hashname,
                            where))
        except CantOpenError:
            # TODO: This happens when the file is in the pool.
            # warn("Cannot open file %s" % f)
            continue
    return rejmsg
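
# Illustrative usage (hypothetical entries; checksum shortened).  Returns a
# list of reject messages, empty when everything matches:
#   files = {"dak_1.0.orig.tar.gz": {"size": "1234", "sha256sum": "ab12..."}}
#   for msg in check_hash(".changes", files, "sha256", apt_pkg.sha256sum):
#       warn(msg)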

################################################################################

def check_size(where, files):
    """
    check_size checks the file sizes in the passed files dict against the
    files on disk.
    """

    rejmsg = []
    for f in files.keys():
        try:
            entry = os.stat(f)
        except OSError as exc:
            if exc.errno == errno.ENOENT:
                # TODO: This happens when the file is in the pool.
                continue
            raise

        actual_size = entry[stat.ST_SIZE]
        size = int(files[f]["size"])
        if size != actual_size:
            rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
                   % (f, actual_size, size, where))
    return rejmsg

################################################################################

def check_dsc_files(dsc_filename, dsc, dsc_files):
    """
    Verify that the files listed in the Files field of the .dsc are
    those expected given the announced Format.

    @type dsc_filename: string
    @param dsc_filename: path of .dsc file

    @type dsc: dict
    @param dsc: the content of the .dsc parsed by C{parse_changes()}

    @type dsc_files: dict
    @param dsc_files: the file list returned by C{build_file_list()}

    @rtype: list
    @return: all errors detected
    """
    rejmsg = []

    # Ensure .dsc lists proper set of source files according to the format
    # announced
    has = defaultdict(lambda: 0)

    ftype_lookup = (
        (r'orig.tar.gz',               ('orig_tar_gz', 'orig_tar')),
        (r'diff.gz',                   ('debian_diff',)),
        (r'tar.gz',                    ('native_tar_gz', 'native_tar')),
        (r'debian\.tar\.(gz|bz2|xz)',  ('debian_tar',)),
        (r'orig\.tar\.(gz|bz2|xz)',    ('orig_tar',)),
        (r'tar\.(gz|bz2|xz)',          ('native_tar',)),
        (r'orig-.+\.tar\.(gz|bz2|xz)', ('more_orig_tar',)),
    )

    for f in dsc_files:
        m = re_issource.match(f)
        if not m:
            rejmsg.append("%s: %s in Files field not recognised as source."
                          % (dsc_filename, f))
            continue

        # Populate 'has' dictionary by resolving keys in lookup table
        matched = False
        for regex, keys in ftype_lookup:
            if re.match(regex, m.group(3)):
                matched = True
                for key in keys:
                    has[key] += 1
                break

        # File does not match anything in lookup table; reject
        if not matched:
            rejmsg.append("%s: unexpected source file '%s'" % (dsc_filename, f))

    # Check for multiple files
    for file_type in ('orig_tar', 'native_tar', 'debian_tar', 'debian_diff'):
        if has[file_type] > 1:
            rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))

    # Source format specific tests
    try:
        format = get_format_from_string(dsc['format'])
        rejmsg.extend([
            '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)
        ])
    except UnknownFormatError:
        # Not an error here for now
        pass

    return rejmsg

################################################################################

def check_hash_fields(what, manifest):
    """
    check_hash_fields ensures that there are no checksum fields in the
    given dict that we do not know about.
    """

    rejmsg = []
    hashes = map(lambda x: x[0], known_hashes)
    for field in manifest:
        if field.startswith("checksums-"):
            hashname = field.split("-", 1)[1]
            if hashname not in hashes:
                rejmsg.append("Unsupported checksum field for %s "\
                    "in %s" % (hashname, what))
    return rejmsg

################################################################################

def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
    if format >= version:
        # The version should contain the specified hash.
        func = check_hash

        # Import hashes from the changes
        rejmsg = parse_checksums(".changes", files, changes, hashname)
        if len(rejmsg) > 0:
            return rejmsg
    else:
        # We need to calculate the hash because it can't possibly
        # be in the file.
        func = create_hash
    return func(".changes", files, hashname, hashfunc)

# We could add the orig which might be in the pool to the files dict to
# access the checksums easily.

def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
    """
    ensure_dsc_hashes' task is to ensure that each and every *present* hash
    in the dsc is correct, i.e. identical to the changes file and if necessary
    the pool.  The latter task is delegated to check_hash.
    """

    rejmsg = []
    if not dsc.has_key('Checksums-%s' % (hashname,)):
        return rejmsg
    # Import hashes from the dsc
    parse_checksums(".dsc", dsc_files, dsc, hashname)
    # And check it...
    rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
    return rejmsg

################################################################################

def parse_checksums(where, files, manifest, hashname):
    rejmsg = []
    field = 'checksums-%s' % hashname
    if not field in manifest:
        return rejmsg
    for line in manifest[field].split('\n'):
        if not line:
            break
        clist = line.strip().split(' ')
        if len(clist) == 3:
            checksum, size, checkfile = clist
        else:
            rejmsg.append("Cannot parse checksum line [%s]" % (line))
            continue
        if not files.has_key(checkfile):
            # TODO: check for the file's entry in the original files dict, not
            # the one modified by (auto)byhand and other weird stuff
            # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
            #     (file, hashname, where))
            continue
        if files[checkfile]["size"] != size:
            rejmsg.append("%s: size differs for files and checksums-%s entry "\
                "in %s" % (checkfile, hashname, where))
            continue
        files[checkfile][hash_key(hashname)] = checksum
    for f in files.keys():
        if not files[f].has_key(hash_key(hashname)):
            rejmsg.append("%s: no entry in checksums-%s in %s" % (f, hashname, where))
    return rejmsg
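
# Illustrative usage (hypothetical manifest; checksum shortened).  A
# Checksums-* field carries "<checksum> <size> <filename>" lines:
#   manifest = {"checksums-sha256": "ab12... 1234 dak_1.0.orig.tar.gz"}
#   files = {"dak_1.0.orig.tar.gz": {"size": "1234"}}
#   parse_checksums(".dsc", files, manifest, "sha256")
#   # files["dak_1.0.orig.tar.gz"]["sha256sum"] is now "ab12..."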

################################################################################

# Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl

def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
    files = {}

    # Make sure we have a Files: field to parse...
    if not changes.has_key(field):
        raise NoFilesFieldError

    # Validate .changes Format: field
    if not is_a_dsc:
        validate_changes_format(parse_format(changes['format']), field)

    includes_section = (not is_a_dsc) and field == "files"

    # Parse each entry/line:
    for i in changes[field].split('\n'):
        if not i:
            break
        s = i.split()
        section = priority = ""
        try:
            if includes_section:
                (md5, size, section, priority, name) = s
            else:
                (md5, size, name) = s
        except ValueError:
            raise ParseChangesError(i)

        if section == "":
            section = "-"
        if priority == "":
            priority = "-"

        (section, component) = extract_component_from_section(section)

        files[name] = dict(size=size, section=section,
                           priority=priority, component=component)
        files[name][hashname] = md5

    return files
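
# Illustrative usage (hypothetical values).  Each line of a .changes Files
# field reads "<md5> <size> <section> <priority> <filename>"; a .dsc omits
# section and priority:
#   changes = {"format": "1.8",
#              "files": "d41d8cd98f00b204e9800998ecf8427e 1234 devel optional dak_1.0-1_amd64.deb"}
#   files = build_file_list(changes)
#   # files["dak_1.0-1_amd64.deb"]["md5sum"] == "d41d8cd98f00b204e9800998ecf8427e"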

################################################################################

# see http://bugs.debian.org/619131
def build_package_list(dsc, session=None):
    if not dsc.has_key("package-list"):
        return {}

    packages = {}

    for line in dsc["package-list"].split("\n"):
        if not line:
            break

        fields = line.split()
        name = fields[0]
        package_type = fields[1]
        (section, component) = extract_component_from_section(fields[2])
        priority = fields[3]

        # Validate type if we have a session
        if session and get_override_type(package_type, session) is None:
            # Maybe just warn and ignore? exit(1) might be a bit hard...
            fubar("invalid type (%s) in Package-List." % (package_type))

        if name not in packages or packages[name]["type"] == "dsc":
            packages[name] = dict(priority=priority, section=section, type=package_type, component=component, files=[])

    return packages

################################################################################

def send_mail(message, filename="", whitelists=None):
    """sendmail wrapper, takes _either_ a message string or a file as arguments

    @type  whitelists: list of (str or None)
    @param whitelists: path to whitelists. C{None} or an empty list whitelists
                       everything, otherwise an address is whitelisted if it is
                       included in any of the lists.
                       In addition a global whitelist can be specified in
                       Dinstall::MailWhiteList.
    """
    maildir = Cnf.get('Dir::Mail')
    if maildir:
        path = os.path.join(maildir, datetime.datetime.now().isoformat())
        path = find_next_free(path)
        with open(path, 'w') as fh:
            fh.write(message)

    # Check whether we're supposed to be sending mail
    if Cnf.has_key("Dinstall::Options::No-Mail") and Cnf["Dinstall::Options::No-Mail"]:
        return

    # If we've been passed a string dump it into a temporary file
    if message:
        (fd, filename) = tempfile.mkstemp()
        os.write(fd, message)
        os.close(fd)

    if whitelists is None or None in whitelists:
        whitelists = []
    if Cnf.get('Dinstall::MailWhiteList', ''):
        whitelists.append(Cnf['Dinstall::MailWhiteList'])
    if len(whitelists) != 0:
        with open_file(filename) as message_in:
            message_raw = modemail.message_from_file(message_in)

        whitelist = []
        for path in whitelists:
            with open_file(path, 'r') as whitelist_in:
                for line in whitelist_in:
                    if not re_whitespace_comment.match(line):
                        if re_re_mark.match(line):
                            whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
                        else:
                            whitelist.append(re.compile(re.escape(line.strip())))

        # Fields to check.
        fields = ["To", "Bcc", "Cc"]
        for field in fields:
            # Check each field
            value = message_raw.get(field, None)
            if value != None:
                match = []
                for item in value.split(","):
                    (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
                    mail_whitelisted = 0
                    for wr in whitelist:
                        if wr.match(email):
                            mail_whitelisted = 1
                            break
                    if not mail_whitelisted:
                        print "Skipping {0} since it's not whitelisted".format(item)
                        continue
                    match.append(item)

                # Doesn't have any mail in whitelist so remove the header
                if len(match) == 0:
                    del message_raw[field]
                else:
                    message_raw.replace_header(field, ', '.join(match))

        # Change message fields in order if we don't have a To header
        if not message_raw.has_key("To"):
            fields.reverse()
            for field in fields:
                if message_raw.has_key(field):
                    message_raw[fields[-1]] = message_raw[field]
                    del message_raw[field]
                    break
            else:
                # Clean up any temporary files
                # and return, as we removed all recipients.
                if message:
                    os.unlink(filename)
                return

        fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0o700)
        os.write(fd, message_raw.as_string(True))
        os.close(fd)

    # Invoke sendmail
    (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
    if result != 0:
        raise SendmailFailedError(output)

    # Clean up any temporary files
    if message:
        os.unlink(filename)
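
# Illustrative usage (assumes a configured Dinstall::SendmailCommand):
#   msg = "To: ftpmaster@example.org\nSubject: test\n\nHello.\n"
#   send_mail(msg)
# or, with the message already on disk:
#   send_mail("", filename="/path/to/message.eml")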

################################################################################

def poolify(source, component=None):
    if source[:3] == "lib":
        return source[:4] + '/' + source + '/'
    else:
        return source[:1] + '/' + source + '/'
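
# Examples:
#   >>> poolify("dak")
#   'd/dak/'
#   >>> poolify("libapt-pkg")
#   'liba/libapt-pkg/'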

################################################################################

def move(src, dest, overwrite=0, perms=0o664):
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = dest
    else:
        dest_dir = os.path.dirname(dest)
    if not os.path.lexists(dest_dir):
        umask = os.umask(00000)
        os.makedirs(dest_dir, 0o2775)
        os.umask(umask)
    #print "Moving %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.lexists(dest):
        if not overwrite:
            fubar("Can't move %s to %s - file already exists." % (src, dest))
        else:
            if not os.access(dest, os.W_OK):
                fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
    os.unlink(src)

def copy(src, dest, overwrite=0, perms=0o664):
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = dest
    else:
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        umask = os.umask(00000)
        os.makedirs(dest_dir, 0o2775)
        os.umask(umask)
    #print "Copying %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.lexists(dest):
        if not overwrite:
            raise FileExistsError
        else:
            if not os.access(dest, os.W_OK):
                raise CantOverwriteError
    shutil.copy2(src, dest)
    os.chmod(dest, perms)

################################################################################

def which_conf_file():
    if os.getenv('DAK_CONFIG'):
        return os.getenv('DAK_CONFIG')

    res = socket.getfqdn()
    # In case we allow local config files per user, try if one exists
    if Cnf.find_b("Config::" + res + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        confpath = os.path.join(homedir, "etc/dak.conf")
        if os.path.exists(confpath):
            apt_pkg.read_config_file_isc(Cnf, confpath)

    # We are still in here, so there is no local config file or we do
    # not allow local files. Do the normal stuff.
    if Cnf.get("Config::" + res + "::DakConfig"):
        return Cnf["Config::" + res + "::DakConfig"]

    return default_config

################################################################################

def TemplateSubst(subst_map, filename):
    """ Perform a substitution of template """
    with open_file(filename) as templatefile:
        template = templatefile.read()
    for k, v in subst_map.iteritems():
        template = template.replace(k, str(v))
    return template
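
# Illustrative usage (hypothetical template and keys): given a template file
# containing "Hi __NAME__", every key is replaced verbatim:
#   TemplateSubst({"__NAME__": "ftpmaster"}, "/srv/dak/templates/greeting")
#   # -> "Hi ftpmaster"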

################################################################################

def fubar(msg, exit_code=1):
    sys.stderr.write("E: %s\n" % (msg))
    sys.exit(exit_code)

def warn(msg):
    sys.stderr.write("W: %s\n" % (msg))

################################################################################

# Returns the user name with a laughable attempt at rfc822 conformancy
# (read: removing stray periods).
def whoami():
    return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')

def getusername():
    return pwd.getpwuid(os.getuid())[0]

################################################################################

def size_type(c):
    t = " B"
    if c > 10240:
        c = c / 1024
        t = " KB"
    if c > 10240:
        c = c / 1024
        t = " MB"
    return ("%d%s" % (c, t))

################################################################################

def cc_fix_changes(changes):
    o = changes.get("architecture", "")
    if o:
        del changes["architecture"]
    changes["architecture"] = {}
    for j in o.split():
        changes["architecture"][j] = 1

def changes_compare(a, b):
    """ Sort by source name, source version, 'have source', and then by filename """
    try:
        a_changes = parse_changes(a)
    except:
        return -1

    try:
        b_changes = parse_changes(b)
    except:
        return 1

    cc_fix_changes(a_changes)
    cc_fix_changes(b_changes)

    # Sort by source name
    a_source = a_changes.get("source")
    b_source = b_changes.get("source")
    q = cmp(a_source, b_source)
    if q:
        return q

    # Sort by source version
    a_version = a_changes.get("version", "0")
    b_version = b_changes.get("version", "0")
    q = apt_pkg.version_compare(a_version, b_version)
    if q:
        return q

    # Sort by 'have source'
    a_has_source = a_changes["architecture"].get("source")
    b_has_source = b_changes["architecture"].get("source")
    if a_has_source and not b_has_source:
        return -1
    elif b_has_source and not a_has_source:
        return 1

    # Fall back to sort by filename
    return cmp(a, b)

################################################################################

def find_next_free(dest, too_many=100):
    extra = 0
    orig_dest = dest
    while os.path.lexists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
        extra += 1
    if extra >= too_many:
        raise NoFreeFilenameError
    return dest

################################################################################

def result_join(original, sep='\t'):
    resultlist = []
    for i in xrange(len(original)):
        if original[i] is None:
            resultlist.append("")
        else:
            resultlist.append(original[i])
    return sep.join(resultlist)

################################################################################

def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    out = ""
    for line in str.split('\n'):
        line = line.strip()
        if line or include_blank_lines:
            out += "%s%s\n" % (prefix, line)
    # Strip trailing new line
    if out:
        out = out[:-1]
    return out

################################################################################

def validate_changes_file_arg(filename, require_changes=1):
    """
    'filename' is either a .changes or .dak file.  If 'filename' is a
    .dak file, it's changed to be the corresponding .changes file.  The
    function then checks if the .changes file a) exists and b) is
    readable and returns the .changes filename if so.  If there's a
    problem, the next action depends on the option 'require_changes'
    argument:

      - If 'require_changes' == -1, errors are ignored and the .changes
        filename is returned.
      - If 'require_changes' == 0, a warning is given and 'None' is returned.
      - If 'require_changes' == 1, a fatal error is raised.

    """
    error = None

    orig_filename = filename
    if filename.endswith(".dak"):
        filename = filename[:-4]+".changes"

    if not filename.endswith(".changes"):
        error = "invalid file type; not a changes file"
    else:
        if not os.access(filename, os.R_OK):
            if os.path.exists(filename):
                error = "permission denied"
            else:
                error = "file not found"

    if error:
        if require_changes == 1:
            fubar("%s: %s." % (orig_filename, error))
        elif require_changes == 0:
            warn("Skipping %s - %s" % (orig_filename, error))
            return None
        else: # We only care about the .dak file
            return filename
    else:
        return filename

################################################################################

def real_arch(arch):
    return (arch != "source" and arch != "all")

################################################################################

def join_with_commas_and(list):
    if len(list) == 0: return "nothing"
    if len(list) == 1: return list[0]
    return ", ".join(list[:-1]) + " and " + list[-1]
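
# Examples:
#   >>> join_with_commas_and(["a", "b", "c"])
#   'a, b and c'
#   >>> join_with_commas_and([])
#   'nothing'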

################################################################################

def pp_deps(deps):
    pp_deps = []
    for atom in deps:
        (pkg, version, constraint) = atom
        if constraint:
            pp_dep = "%s (%s %s)" % (pkg, constraint, version)
        else:
            pp_dep = pkg
        pp_deps.append(pp_dep)
    return " |".join(pp_deps)

################################################################################

def parse_args(Options):
    """ Handle -a, -c and -s arguments; returns them as SQL constraints """
    # XXX: This should go away and everything which calls it be converted
    #      to use SQLA properly.  For now, we'll just fix it not to use
    #      the old Pg interface though
    session = DBConn().session()

    # Process suite
    if Options["Suite"]:
        suite_ids_list = []
        for suitename in split_args(Options["Suite"]):
            suite = get_suite(suitename, session=session)
            if not suite or suite.suite_id is None:
                warn("suite '%s' not recognised." % (suite and suite.suite_name or suitename))
            else:
                suite_ids_list.append(suite.suite_id)
        if suite_ids_list:
            con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
        else:
            fubar("No valid suite given.")
    else:
        con_suites = ""

    # Process component
    if Options["Component"]:
        component_ids_list = []
        for componentname in split_args(Options["Component"]):
            component = get_component(componentname, session=session)
            if component is None:
                warn("component '%s' not recognised." % (componentname))
            else:
                component_ids_list.append(component.component_id)
        if component_ids_list:
            con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
        else:
            fubar("No valid component given.")
    else:
        con_components = ""

    # Process architecture
    con_architectures = ""
    check_source = 0
    if Options["Architecture"]:
        arch_ids_list = []
        for archname in split_args(Options["Architecture"]):
            if archname == "source":
                check_source = 1
            else:
                arch = get_architecture(archname, session=session)
                if arch is None:
                    warn("architecture '%s' not recognised." % (archname))
                else:
                    arch_ids_list.append(arch.arch_id)
        if arch_ids_list:
            con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
        else:
            if not check_source:
                fubar("No valid architecture given.")
    else:
        check_source = 1

    return (con_suites, con_architectures, con_components, check_source)

################################################################################

def arch_compare_sw(a, b):
    """
    Function for use in sorting lists of architectures.

    Sorts normally except that 'source' dominates all others.
    """

    if a == "source" and b == "source":
        return 0
    elif a == "source":
        return -1
    elif b == "source":
        return 1

    return cmp(a, b)

################################################################################

def split_args(s, dwim=1):
    """
    Split command line arguments which can be separated by either commas
    or whitespace.  If dwim is set, it will complain about a string ending
    in a comma since this usually means someone did 'dak ls -a i386, m68k
    foo' or something and the inevitable confusion resulting from 'm68k'
    being treated as an argument is undesirable.
    """

    if s.find(",") == -1:
        return s.split()
    else:
        if s[-1:] == "," and dwim:
            fubar("split_args: found trailing comma, spurious space maybe?")
        return s.split(",")
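
# Examples:
#   >>> split_args("i386,m68k")
#   ['i386', 'm68k']
#   >>> split_args("i386 m68k")
#   ['i386', 'm68k']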

################################################################################

def gpgv_get_status_output(cmd, status_read, status_write):
    """
    Our very own version of commands.getstatusoutput(), hacked to support
    gpgv's status fd.
    """

    cmd = ['/bin/sh', '-c', cmd]
    p2cread, p2cwrite = os.pipe()
    c2pread, c2pwrite = os.pipe()
    errout, errin = os.pipe()
    pid = os.fork()
    if pid == 0:
        # Child: attach the pipes to stdin/stdout/stderr.
        os.close(0)
        os.close(1)
        os.dup(p2cread)
        os.dup(c2pwrite)
        os.close(2)
        os.dup(errin)
        # Close every other fd except gpgv's status fd.
        for i in range(3, 256):
            if i != status_write:
                try:
                    os.close(i)
                except:
                    pass
        try:
            os.execvp(cmd[0], cmd)
        finally:
            os._exit(1)

    # Parent
    os.close(p2cread)
    os.dup2(c2pread, c2pwrite)
    os.dup2(errout, errin)

    output = status = ""
    while 1:
        i, o, e = select.select([c2pwrite, errin, status_read], [], [])
        more_data = []
        for fd in i:
            r = os.read(fd, 8196)
            if len(r) > 0:
                more_data.append(fd)
                if fd == c2pwrite or fd == errin:
                    output += r
                elif fd == status_read:
                    status += r
                else:
                    fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
        if not more_data:
            pid, exit_status = os.waitpid(pid, 0)
            break

    os.close(status_write)
    os.close(status_read)
    os.close(c2pread)
    os.close(c2pwrite)
    os.close(p2cwrite)
    os.close(errin)
    os.close(errout)

    return output, status, exit_status

################################################################################

def process_gpgv_output(status):
    # Process the status-fd output
    keywords = {}
    internal_error = ""
    for line in status.split('\n'):
        if line == "":
            continue
        split = line.split()
        if len(split) < 2:
            internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
            continue
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
            continue
        args = split[2:]
        if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
            continue
        else:
            keywords[keyword] = args

    return (keywords, internal_error)

################################################################################

def retrieve_key(filename, keyserver=None, keyring=None):
    """
    Retrieve the key that signed 'filename' from 'keyserver' and
    add it to 'keyring'.  Returns nothing on success, or an error message
    on error.
    """

    # Defaults for keyserver and keyring
    if not keyserver:
        keyserver = Cnf["Dinstall::KeyServer"]
    if not keyring:
        keyring = get_primary_keyring_path()

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(filename):
        return "%s: tainted filename" % (filename)

    # Invoke gpgv on the file
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
    (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)
    if internal_error:
        return internal_error

    if not keywords.has_key("NO_PUBKEY"):
        return "didn't find expected NO_PUBKEY in gpgv status-fd output"

    fingerprint = keywords["NO_PUBKEY"][0]
    # XXX - gpg sucks.  You can't use --secret-keyring=/dev/null as
    # it'll try to create a lockfile in /dev.  A better solution might
    # be a tempfile or something.
    cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
          % (Cnf["Dinstall::SigningKeyring"])
    cmd += " --keyring %s --keyserver %s --recv-key %s" \
           % (keyring, keyserver, fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    if result != 0:
        return "'%s' failed with exit code %s" % (cmd, result)

################################################################################

def gpg_keyring_args(keyrings=None):
    if not keyrings:
        keyrings = get_active_keyring_paths()

    return " ".join(["--keyring %s" % x for x in keyrings])

################################################################################

def check_signature(sig_filename, data_filename="", keyrings=None, autofetch=None, session=None):
    """
    Check the signature of a file and return the fingerprint if the
    signature is valid or 'None' if it's not.  The first argument is the
    filename whose signature should be checked.  The second argument is
    optional and is the name of the file the detached signature applies
    to.  The third argument is optional and is a *list* of keyrings to
    use.  'autofetch' can either be None, True or False.  If None, the
    default behaviour specified in the config will be used.

    Returns a tuple of (fingerprint, list of reject messages); on failure
    the fingerprint is None and the list explains why.
    """

    rejects = []

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(sig_filename):
        rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
        return (None, rejects)

    if data_filename and not re_taint_free.match(data_filename):
        rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
        return (None, rejects)

    if not keyrings:
        keyrings = [ x.keyring_name for x in session.query(Keyring).filter(Keyring.active == True).all() ]

    # Autofetch the signing key if that's enabled
    if autofetch is None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    if autofetch:
        error_msg = retrieve_key(sig_filename)
        if error_msg:
            rejects.append(error_msg)
            return (None, rejects)

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    if internal_error:
        rejects.append("internal error while performing signature check on %s." % (sig_filename))
        rejects.append(internal_error)
        rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.")
        return (None, rejects)

    # Now check for obviously bad things in the processed output
    if keywords.has_key("KEYREVOKED"):
        rejects.append("The key used to sign %s has been revoked." % (sig_filename))
    if keywords.has_key("BADSIG"):
        rejects.append("bad signature on %s." % (sig_filename))
    if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
        rejects.append("failed to check signature on %s." % (sig_filename))
    if keywords.has_key("NO_PUBKEY"):
        args = keywords["NO_PUBKEY"]
        key = ""
        if len(args) >= 1:
            key = args[0]
        rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if keywords.has_key("BADARMOR"):
        rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if keywords.has_key("NODATA"):
        rejects.append("no signature found in %s." % (sig_filename))
    if keywords.has_key("EXPKEYSIG"):
        args = keywords["EXPKEYSIG"]
        key = ""
        if len(args) >= 1:
            key = args[0]
        rejects.append("Signature made by expired key 0x%s" % (key))
    if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
        args = keywords["KEYEXPIRED"]
        expiredate = ""
        if len(args) >= 1:
            timestamp = args[0]
            if timestamp.count("T") == 0:
                try:
                    expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
                except ValueError:
                    expiredate = "unknown (%s)" % (timestamp)
            else:
                expiredate = timestamp
        rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))

    if len(rejects) > 0:
        return (None, rejects)

    # Next check gpgv exited with a zero return code
    if exit_status:
        rejects.append("gpgv failed while checking %s." % (sig_filename))
        if status.strip():
            rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "))
        else:
            rejects.append(prefix_multi_line_string(output, " [GPG output:] "))
        return (None, rejects)

    # Sanity check the good stuff we expect
    if not keywords.has_key("VALIDSIG"):
        rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
    else:
        args = keywords["VALIDSIG"]
        if len(args) < 1:
            rejects.append("internal error while checking signature on %s." % (sig_filename))
        else:
            fingerprint = args[0]
    if not keywords.has_key("GOODSIG"):
        rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if not keywords.has_key("SIG_ID"):
        rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

    # Finally ensure there's not something we don't recognise
    known_keywords = dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
                          NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="",POLICY_URL="")

    for keyword in keywords.keys():
        if not known_keywords.has_key(keyword):
            rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))

    if len(rejects) > 0:
        return (None, rejects)

    return (fingerprint, [])

################################################################################

def gpg_get_key_addresses(fingerprint):
    """retrieve email addresses from gpg key uids for a given fingerprint"""
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses is not None:
        return addresses
    addresses = []
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
          % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    if result == 0:
        for l in output.split('\n'):
            m = re_gpg_uid.match(l)
            if not m:
                continue
            address = m.group(1)
            if address.endswith('@debian.org'):
                # prefer @debian.org addresses
                # TODO: maybe not hardcode the domain
                addresses.insert(0, address)
            else:
                addresses.append(m.group(1))
    key_uid_email_cache[fingerprint] = addresses
    return addresses

################################################################################

def get_logins_from_ldap(fingerprint='*'):
    """retrieve login from LDAP linked to a given fingerprint"""

    LDAPDn = Cnf['Import-LDAP-Fingerprints::LDAPDn']
    LDAPServer = Cnf['Import-LDAP-Fingerprints::LDAPServer']
    l = ldap.open(LDAPServer)
    l.simple_bind_s('','')
    Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
                       '(keyfingerprint=%s)' % fingerprint,
                       ['uid', 'keyfingerprint'])
    login = {}
    for elem in Attrs:
        login[elem[1]['keyFingerPrint'][0]] = elem[1]['uid'][0]
    return login

################################################################################

def get_users_from_ldap():
    """retrieve login and user names from LDAP"""

    LDAPDn = Cnf['Import-LDAP-Fingerprints::LDAPDn']
    LDAPServer = Cnf['Import-LDAP-Fingerprints::LDAPServer']
    l = ldap.open(LDAPServer)
    l.simple_bind_s('','')
    Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
                       '(uid=*)', ['uid', 'cn', 'mn', 'sn'])
    users = {}
    for elem in Attrs:
        elem = elem[1]
        name = []
        for k in ('cn', 'mn', 'sn'):
            try:
                if elem[k][0] != '-':
                    name.append(elem[k][0])
            except KeyError:
                pass
        users[' '.join(name)] = elem['uid'][0]
    return users

################################################################################

def clean_symlink(src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
    Returns fixed 'src'
    """
    src = src.replace(root, '', 1)
    dest = dest.replace(root, '', 1)
    dest = os.path.dirname(dest)
    new_src = '../' * len(dest.split('/'))
    return new_src + src
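
# Illustrative example (hypothetical paths; 'root' must end in '/' so the
# replace() calls strip it cleanly):
#   >>> clean_symlink("/srv/ftp/pool/main/d/dak/dak_1.0.orig.tar.gz",
#   ...               "/srv/ftp/dists/sid/main/source/dak_1.0.orig.tar.gz",
#   ...               "/srv/ftp/")
#   '../../../../pool/main/d/dak/dak_1.0.orig.tar.gz'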

################################################################################

def temp_filename(directory=None, prefix="dak", suffix="", mode=None, group=None):
    """
    Return a secure and unique filename by pre-creating it.

    @type directory: str
    @param directory: If non-null it will be the directory the file is pre-created in.

    @type prefix: str
    @param prefix: The filename will be prefixed with this string

    @type suffix: str
    @param suffix: The filename will end with this string

    @type mode: str
    @param mode: If set the file will get chmodded to those permissions

    @type group: str
    @param group: If set the file will get chgrped to the specified group.

    @rtype: list
    @return: Returns a pair (fd, name)
    """

    (tfd, tfname) = tempfile.mkstemp(suffix, prefix, directory)
    if mode:
        os.chmod(tfname, mode)
    if group:
        gid = grp.getgrnam(group).gr_gid
        os.chown(tfname, -1, gid)
    return (tfd, tfname)

################################################################################

def temp_dirname(parent=None, prefix="dak", suffix="", mode=None, group=None):
    """
    Return a secure and unique directory by pre-creating it.

    @type parent: str
    @param parent: If non-null it will be the directory the directory is pre-created in.

    @type prefix: str
    @param prefix: The filename will be prefixed with this string

    @type suffix: str
    @param suffix: The filename will end with this string

    @type mode: str
    @param mode: If set the directory will get chmodded to those permissions

    @type group: str
    @param group: If set the directory will get chgrped to the specified group.

    @rtype: str
    @return: Returns the directory name
    """

    tfname = tempfile.mkdtemp(suffix, prefix, parent)
    if mode:
        os.chmod(tfname, mode)
    if group:
        gid = grp.getgrnam(group).gr_gid
        os.chown(tfname, -1, gid)
    return tfname

################################################################################

def is_email_alias(email):
    """ checks if the user part of the email is listed in the alias file """
    global alias_cache
    if alias_cache is None:
        aliasfn = which_alias_file()
        alias_cache = set()
        if aliasfn:
            for l in open(aliasfn):
                alias_cache.add(l.split(':')[0])
    uid = email.split('@')[0]
    return uid in alias_cache

################################################################################

def get_changes_files(from_dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place)

    Returns a list of filenames
    """
    try:
        # Much of the rest of p-u/p-a depends on being in the right place
        os.chdir(from_dir)
        changes_files = [x for x in os.listdir(from_dir) if x.endswith('.changes')]
    except OSError as e:
        fubar("Failed to read list from directory %s (%s)" % (from_dir, e))

    return changes_files

################################################################################

Cnf = config.Config().Cnf

################################################################################

def parse_wnpp_bug_file(file="/srv/ftp-master.debian.org/scripts/masterfiles/wnpp_rm"):
    """
    Parses the wnpp bug list available at http://qa.debian.org/data/bts/wnpp_rm
    Well, actually it parses a local copy, but let's document the source
    somewhere ;)

    returns a dict associating source package name with a list of open wnpp
    bugs (Yes, there might be more than one)
    """

    try:
        f = open(file)
        lines = f.readlines()
    except IOError as e:
        print "Warning: Couldn't open %s; don't know about WNPP bugs, so won't close any." % file
        lines = []
    wnpp = {}

    for line in lines:
        split_line = line.split(": ", 1)
        if len(split_line) > 1:
            wnpp[split_line[0]] = split_line[1].split("|")

    for source in wnpp.keys():
        bugs = []
        for wnpp_bug in wnpp[source]:
            bug_no = re.search("(\d)+", wnpp_bug).group()
            if bug_no:
                bugs.append(bug_no)
        wnpp[source] = bugs
    return wnpp

################################################################################

def get_packages_from_ftp(root, suite, component, architecture):
    """
    Returns an object containing apt_pkg-parseable data collected by
    aggregating Packages.gz files gathered for each architecture.

    @type root: string
    @param root: path to ftp archive root directory

    @type suite: string
    @param suite: suite to extract files from

    @type component: string
    @param component: component to extract files from

    @type architecture: string
    @param architecture: architecture to extract files from

    @rtype: TagFile
    @return: apt_pkg class containing package data
    """
    filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (root, suite, component, architecture)
    (fd, temp_file) = temp_filename()
    (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_file))
    if result != 0:
        fubar("Gunzip invocation failed!\n%s\n" % (output), result)
    filename = "%s/dists/%s/%s/debian-installer/binary-%s/Packages.gz" % (root, suite, component, architecture)
    if os.path.exists(filename):
        (result, output) = commands.getstatusoutput("gunzip -c %s >> %s" % (filename, temp_file))
        if result != 0:
            fubar("Gunzip invocation failed!\n%s\n" % (output), result)
    packages = open_file(temp_file)
    Packages = apt_pkg.TagFile(packages)
    os.unlink(temp_file)
    return Packages

################################################################################

def deb_extract_control(fh):
    """extract DEBIAN/control from a binary package"""
    return apt_inst.DebFile(fh).control.extractdata("control")

################################################################################

def mail_addresses_for_upload(maintainer, changed_by, fingerprint):
    """mail addresses to contact for an upload

    @type  maintainer: str
    @param maintainer: Maintainer field of the .changes file

    @type  changed_by: str
    @param changed_by: Changed-By field of the .changes file

    @type  fingerprint: str
    @param fingerprint: fingerprint of the key used to sign the upload

    @rtype:  list of str
    @return: list of RFC 2047-encoded mail addresses to contact regarding
             this upload
    """
    addresses = [maintainer]
    if changed_by != maintainer:
        addresses.append(changed_by)

    fpr_addresses = gpg_get_key_addresses(fingerprint)
    if len(fpr_addresses) > 0 and fix_maintainer(changed_by)[3] not in fpr_addresses and fix_maintainer(maintainer)[3] not in fpr_addresses:
        addresses.append(fpr_addresses[0])

    encoded_addresses = [ fix_maintainer(e)[1] for e in addresses ]
    return encoded_addresses
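
# Illustrative usage (hypothetical values): maintainer and uploader both get
# mail; the key holder's first uid address is added when neither of the other
# addresses belongs to the signing key:
#   mail_addresses_for_upload("Maint <maint@example.org>",
#                             "Uploader <upload@example.org>",
#                             "0123456789ABCDEF0123456789ABCDEF01234567")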

################################################################################

def call_editor(text="", suffix=".txt"):
    """run editor and return the result as a string

    @type  text: str
    @param text: initial text

    @type  suffix: str
    @param suffix: extension for temporary file

    @rtype:  str
    @return: string with the edited text
    """
    editor = os.environ.get('VISUAL', os.environ.get('EDITOR', 'vi'))
    tmp = tempfile.NamedTemporaryFile(suffix=suffix, delete=False)
    try:
        print >>tmp, text,
        tmp.close()
        daklib.daksubprocess.check_call([editor, tmp.name])
        return open(tmp.name, 'r').read()
    finally:
        os.unlink(tmp.name)

################################################################################

def check_reverse_depends(removals, suite, arches=None, session=None, cruft=False):
    dbsuite = get_suite(suite, session)
    overridesuite = dbsuite
    if dbsuite.overridesuite is not None:
        overridesuite = get_suite(dbsuite.overridesuite, session)
    dep_problem = 0
    p2c = {}
    all_broken = {}
    if arches:
        all_arches = set(arches)
    else:
        all_arches = set([x.arch_string for x in get_suite_architectures(suite)])
    all_arches -= set(["source", "all"])
    metakey_d = get_or_set_metadatakey("Depends", session)
    metakey_p = get_or_set_metadatakey("Provides", session)
    params = {
        'suite_id':     dbsuite.suite_id,
        'metakey_d_id': metakey_d.key_id,
        'metakey_p_id': metakey_p.key_id,
    }
    for architecture in all_arches | set(['all']):
        deps = {}
        sources = {}
        virtual_packages = {}
        params['arch_id'] = get_architecture(architecture, session).arch_id

        statement = '''
            SELECT b.id, b.package, s.source, c.name as component,
                (SELECT bmd.value FROM binaries_metadata bmd WHERE bmd.bin_id = b.id AND bmd.key_id = :metakey_d_id) AS depends,
                (SELECT bmp.value FROM binaries_metadata bmp WHERE bmp.bin_id = b.id AND bmp.key_id = :metakey_p_id) AS provides
                FROM binaries b
                JOIN bin_associations ba ON b.id = ba.bin AND ba.suite = :suite_id
                JOIN source s ON b.source = s.id
                JOIN files_archive_map af ON b.file = af.file_id
                JOIN component c ON af.component_id = c.id
                WHERE b.architecture = :arch_id'''
        query = session.query('id', 'package', 'source', 'component', 'depends', 'provides'). \
            from_statement(statement).params(params)
        for binary_id, package, source, component, depends, provides in query:
            sources[package] = source
            p2c[package] = component
            if depends is not None:
                deps[package] = depends
            # Maintain a counter for each virtual package.  If a
            # Provides: exists, set the counter to 0 and count all
            # provides by a package not in the list for removal.
            # If the counter stays 0 at the end, we know that only
            # the to-be-removed packages provided this virtual
            # package.
            if provides is not None:
                for virtual_pkg in provides.split(","):
                    virtual_pkg = virtual_pkg.strip()
                    if virtual_pkg == package: continue
                    if not virtual_packages.has_key(virtual_pkg):
                        virtual_packages[virtual_pkg] = 0
                    if package not in removals:
                        virtual_packages[virtual_pkg] += 1

        # If a virtual package is only provided by the to-be-removed
        # packages, treat the virtual package as to-be-removed too.
        for virtual_pkg in virtual_packages.keys():
            if virtual_packages[virtual_pkg] == 0:
                removals.append(virtual_pkg)

        # Check binary dependencies (Depends)
        for package in deps.keys():
            if package in removals: continue
            parsed_dep = []
            try:
                parsed_dep += apt_pkg.parse_depends(deps[package])
            except ValueError as e:
                print "Error for package %s: %s" % (package, e)
            for dep in parsed_dep:
                # Check for partial breakage.  If a package has a ORed
                # dependency, there is only a dependency problem if all
                # packages in the ORed depends will be removed.
                unsat = 0
                for dep_package, _, _ in dep:
                    if dep_package in removals:
                        unsat += 1
                if unsat == len(dep):
                    component = p2c[package]
                    source = sources[package]
                    if component != "main":
                        source = "%s/%s" % (source, component)
                    all_broken.setdefault(source, {}).setdefault(package, set()).add(architecture)
                    dep_problem = 1

    if all_broken:
        if cruft:
            print " - broken Depends:"
        else:
            print "# Broken Depends:"
        for source, bindict in sorted(all_broken.items()):
            lines = []
            for binary, arches in sorted(bindict.items()):
                if arches == all_arches or 'all' in arches:
                    lines.append(binary)
                else:
                    lines.append('%s [%s]' % (binary, ' '.join(sorted(arches))))
            if cruft:
                print ' %s: %s' % (source, lines[0])
            else:
                print '%s: %s' % (source, lines[0])
            for line in lines[1:]:
                if cruft:
                    print ' ' + ' ' * (len(source) + 2) + line
                else:
                    print ' ' * (len(source) + 2) + line
        if not cruft:
            print
    # Check source dependencies (Build-Depends and Build-Depends-Indep)
    all_broken = {}
    metakey_bd = get_or_set_metadatakey("Build-Depends", session)
    metakey_bdi = get_or_set_metadatakey("Build-Depends-Indep", session)
    params = {
        'suite_id':    dbsuite.suite_id,
        'metakey_ids': (metakey_bd.key_id, metakey_bdi.key_id),
    }
    statement = '''
        SELECT s.id, s.source, string_agg(sm.value, ', ') as build_dep
           FROM source s
           JOIN source_metadata sm ON s.id = sm.src_id
           WHERE s.id in
               (SELECT source FROM src_associations
                   WHERE suite = :suite_id)
               AND sm.key_id in :metakey_ids
           GROUP BY s.id, s.source'''
    query = session.query('id', 'source', 'build_dep').from_statement(statement). \
        params(params)
    for source_id, source, build_dep in query:
        if source in removals: continue
        parsed_dep = []
        if build_dep is not None:
            # Remove [arch] information since we want to see breakage on all arches
            build_dep = re_build_dep_arch.sub("", build_dep)
            try:
                parsed_dep += apt_pkg.parse_depends(build_dep)
            except ValueError as e:
                print "Error for source %s: %s" % (source, e)
        for dep in parsed_dep:
            unsat = 0
            for dep_package, _, _ in dep:
                if dep_package in removals:
                    unsat += 1
            if unsat == len(dep):
                component, = session.query(Component.component_name) \
                    .join(Component.overrides) \
                    .filter(Override.suite == overridesuite) \
                    .filter(Override.package == re.sub('/(contrib|non-free)$', '', source)) \
                    .join(Override.overridetype).filter(OverrideType.overridetype == 'dsc') \
                    .first()
                key = source
                if component != "main":
                    key = "%s/%s" % (source, component)
                all_broken.setdefault(key, set()).add(pp_deps(dep))
                dep_problem = 1

    if all_broken:
        if cruft:
            print " - broken Build-Depends:"
        else:
            print "# Broken Build-Depends:"
        for source, bdeps in sorted(all_broken.items()):
            bdeps = sorted(bdeps)
            if cruft:
                print ' %s: %s' % (source, bdeps[0])
            else:
                print '%s: %s' % (source, bdeps[0])
            for bdep in bdeps[1:]:
                if cruft:
                    print ' ' + ' ' * (len(source) + 2) + bdep
                else:
                    print ' ' * (len(source) + 2) + bdep
        if not cruft:
            print

    return dep_problem