# vim:set et ts=4 sw=4:

"""Utility functions

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
@license: GNU General Public License version 2 or later
"""

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA

import commands
import datetime
import email as modemail
import errno
import grp
import os
import pwd
import re
import select
import shutil
import socket
import stat
import subprocess
import sys
import tempfile
import time

import apt_inst
import apt_pkg

from dbconn import DBConn, get_architecture, get_component, get_suite, \
                   get_override_type, Keyring, session_wrapper, \
                   get_active_keyring_paths, get_primary_keyring_path, \
                   get_suite_architectures, get_or_set_metadatakey, DBSource, \
                   Component, Override, OverrideType
from sqlalchemy import desc
from dak_exceptions import *
from gpg import SignedFile
from textutils import fix_maintainer
from regexes import re_html_escaping, html_escaping, re_single_line_field, \
                    re_multi_line_field, re_srchasver, re_taint_free, \
                    re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource, \
                    re_is_orig_source, re_build_dep_arch
from formats import parse_format, validate_changes_format
from srcformats import get_format_from_string
from collections import defaultdict

################################################################################

default_config = "/etc/dak/dak.conf"     #: default dak config, defines host properties
default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used

alias_cache = None        #: Cache for email alias checks
key_uid_email_cache = {}  #: Cache for email addresses from gpg key uids

# (hashname, function, earliest_changes_version)
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
                ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc

# Monkeypatch commands.getstatusoutput as it may not return the correct exit
# code in lenny's Python.  This also affects commands.getoutput and
# commands.getstatus.
def dak_getstatusoutput(cmd):
    pipe = subprocess.Popen(cmd, shell=True, universal_newlines=True,
        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

    output = pipe.stdout.read()

    if output[-1:] == '\n':
        output = output[:-1]

    ret = pipe.wait()
    if ret is None:
        ret = 0

    return ret, output

commands.getstatusoutput = dak_getstatusoutput
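
# Illustrative sketch: with the monkeypatch in place, callers keep using the
# stock `commands` API and transparently get the corrected exit-code
# behaviour, e.g.:
#
#   >>> (result, output) = commands.getstatusoutput("dpkg --version")
#   >>> result
#   0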

################################################################################

def html_escape(s):
    """ Escape html chars """
    return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)

################################################################################

def open_file(filename, mode='r'):
    """
    Open C{filename}, return fileobject.

    @type filename: string
    @param filename: path/filename to open

    @type mode: string
    @param mode: open mode

    @rtype: fileobject
    @return: open fileobject

    @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
    """
    try:
        f = open(filename, mode)
    except IOError:
        raise CantOpenError(filename)
    return f
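
# Usage sketch (hypothetical path); CantOpenError is raised instead of a bare
# IOError and carries the offending filename:
#
#   try:
#       changes_in = open_file("/srv/queue/example_1.0-1_amd64.changes")
#   except CantOpenError as e:
#       warn("can't read %s" % e)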

################################################################################

def our_raw_input(prompt=""):
    sys.stdout.write(prompt)
    try:
        return raw_input()
    except EOFError:
        sys.stderr.write("\nUser interrupt (^D).\n")
        raise SystemExit

################################################################################

def extract_component_from_section(section, session=None):
    component = ""

    if section.find('/') != -1:
        component = section.split('/')[0]

    # Expand default component
    if component == "":
        comp = get_component(section, session)
        if comp is None:
            component = "main"
        else:
            component = comp.component_name

    return (section, component)
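
# Illustrative examples (assuming no matching component row in the database,
# so the default lookup falls back to "main"):
#
#   >>> extract_component_from_section("contrib/net")
#   ('contrib/net', 'contrib')
#   >>> extract_component_from_section("net")
#   ('net', 'main')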

################################################################################

def parse_deb822(armored_contents, signing_rules=0, keyrings=None, session=None):
    require_signature = True
    if keyrings == None:
        keyrings = []
        require_signature = False

    signed_file = SignedFile(armored_contents, keyrings=keyrings, require_signature=require_signature)
    contents = signed_file.contents

    error = ""
    changes = {}

    # Split the lines in the input, keeping the linebreaks.
    lines = contents.splitlines(True)

    if len(lines) == 0:
        raise ParseChangesError("[Empty changes file]")

    # Reindex by line number so we can easily verify the format of
    # .dsc files...
    index = 0
    indexed_lines = {}
    for line in lines:
        index += 1
        indexed_lines[index] = line[:-1]

    num_of_lines = len(indexed_lines.keys())
    index = 0
    first = -1
    while index < num_of_lines:
        index += 1
        line = indexed_lines[index]
        if line == "" and signing_rules == 1:
            if index != num_of_lines:
                raise InvalidDscError(index)
            break
        slf = re_single_line_field.match(line)
        if slf:
            field = slf.groups()[0].lower()
            changes[field] = slf.groups()[1]
            first = 1
            continue
        if line == " .":
            changes[field] += '\n'
            continue
        mlf = re_multi_line_field.match(line)
        if mlf:
            if first == -1:
                raise ParseChangesError("'%s'\n [Multi-line field continuing on from nothing?]" % (line))
            if first == 1 and changes[field] != "":
                changes[field] += '\n'
            first = 0
            changes[field] += mlf.groups()[0] + '\n'
            continue
        error += line

    changes["filecontents"] = armored_contents

    if changes.has_key("source"):
        # Strip the source version in brackets from the source field,
        # put it in the "source-version" field instead.
        srcver = re_srchasver.search(changes["source"])
        if srcver:
            changes["source"] = srcver.group(1)
            changes["source-version"] = srcver.group(2)

    if error:
        raise ParseChangesError(error)

    return changes
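
# Sketch of the deb822 syntax this parser accepts (illustrative): a
# "Field: value" line starts a field, continuation lines start with a single
# space, and a lone " ." encodes a blank line within a multi-line value:
#
#   Source: example
#   Description: first line
#    continuation line
#    .
#    text after an embedded blank line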

################################################################################

def parse_changes(filename, signing_rules=0, dsc_file=0, keyrings=None):
    """
    Parses a changes file and returns a dictionary where each field is a
    key.  The mandatory first argument is the filename of the .changes
    file.

    signing_rules is an optional argument:

      - If signing_rules == -1, no signature is required.
      - If signing_rules == 0 (the default), a signature is required.
      - If signing_rules == 1, it turns on the same strict format checking
        as dpkg-source.

    The rules for (signing_rules == 1)-mode are:

      - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
        followed by any PGP header data and must end with a blank line.

      - The data section must end with a blank line and must be followed by
        "-----BEGIN PGP SIGNATURE-----".
    """

    changes_in = open_file(filename)
    content = changes_in.read()
    changes_in.close()
    try:
        unicode(content, 'utf-8')
    except UnicodeError:
        raise ChangesUnicodeError("Changes file not proper utf-8")
    changes = parse_deb822(content, signing_rules, keyrings=keyrings)

    if not dsc_file:
        # Finally ensure that everything needed for .changes is there
        must_keywords = ('Format', 'Date', 'Source', 'Binary', 'Architecture', 'Version',
                         'Distribution', 'Maintainer', 'Description', 'Changes', 'Files')

        missingfields = []
        for keyword in must_keywords:
            if not changes.has_key(keyword.lower()):
                missingfields.append(keyword)

        if len(missingfields):
            raise ParseChangesError("Missing mandatory field(s) in changes file (policy 5.5): %s" % (missingfields))

    return changes
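
# Usage sketch (hypothetical filename): .dsc files share the same parser but
# skip the .changes-specific mandatory-field check via dsc_file=1:
#
#   dsc = parse_changes("example_1.0-1.dsc", signing_rules=1, dsc_file=1)
#   source, version = dsc["source"], dsc["version"]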

################################################################################

def hash_key(hashname):
    return '%ssum' % hashname

################################################################################

def create_hash(where, files, hashname, hashfunc):
    """
    create_hash extends the passed files dict with the given hash by
    iterating over all files on disk and passing them to the hashing
    function given.
    """

    rejmsg = []
    for f in files.keys():
        try:
            file_handle = open_file(f)
        except CantOpenError:
            rejmsg.append("Could not open file %s for checksumming" % (f))
            continue

        files[f][hash_key(hashname)] = hashfunc(file_handle)

        file_handle.close()
    return rejmsg

################################################################################

def check_hash(where, files, hashname, hashfunc):
    """
    check_hash checks the given hash in the files dict against the actual
    files on disk.  The hash values need to be present consistently in
    all file entries.  It does not modify its input in any way.
    """

    rejmsg = []
    for f in files.keys():
        try:
            file_handle = open_file(f)

            # Check for the hash entry, to not trigger a KeyError.
            if not files[f].has_key(hash_key(hashname)):
                rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
                    where))
                continue

            # Actually check the hash for correctness.
            if hashfunc(file_handle) != files[f][hash_key(hashname)]:
                rejmsg.append("%s: %s check failed in %s" % (f, hashname,
                    where))
        except CantOpenError:
            # TODO: This happens when the file is in the pool.
            # warn("Cannot open file %s" % f)
            continue
    return rejmsg

################################################################################

def check_size(where, files):
    """
    check_size checks the file sizes in the passed files dict against the
    files on disk.
    """

    rejmsg = []
    for f in files.keys():
        try:
            entry = os.stat(f)
        except OSError as exc:
            if exc.errno == errno.ENOENT:
                # TODO: This happens when the file is in the pool.
                continue
            raise

        actual_size = entry[stat.ST_SIZE]
        size = int(files[f]["size"])
        if size != actual_size:
            rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
                   % (f, actual_size, size, where))
    return rejmsg

################################################################################

def check_dsc_files(dsc_filename, dsc, dsc_files):
    """
    Verify that the files listed in the Files field of the .dsc are
    those expected given the announced Format.

    @type dsc_filename: string
    @param dsc_filename: path of .dsc file

    @type dsc: dict
    @param dsc: the content of the .dsc parsed by C{parse_changes()}

    @type dsc_files: dict
    @param dsc_files: the file list returned by C{build_file_list()}

    @rtype: list
    @return: all errors detected
    """
    rejmsg = []

    # Ensure .dsc lists proper set of source files according to the format
    # announced
    has = defaultdict(lambda: 0)

    ftype_lookup = (
        (r'orig.tar.gz',               ('orig_tar_gz', 'orig_tar')),
        (r'diff.gz',                   ('debian_diff',)),
        (r'tar.gz',                    ('native_tar_gz', 'native_tar')),
        (r'debian\.tar\.(gz|bz2|xz)',  ('debian_tar',)),
        (r'orig\.tar\.(gz|bz2|xz)',    ('orig_tar',)),
        (r'tar\.(gz|bz2|xz)',          ('native_tar',)),
        (r'orig-.+\.tar\.(gz|bz2|xz)', ('more_orig_tar',)),
    )

    for f in dsc_files.keys():
        m = re_issource.match(f)
        if not m:
            rejmsg.append("%s: %s in Files field not recognised as source."
                          % (dsc_filename, f))
            continue

        # Populate 'has' dictionary by resolving keys in lookup table
        matched = False
        for regex, keys in ftype_lookup:
            if re.match(regex, m.group(3)):
                matched = True
                for key in keys:
                    has[key] += 1
                break

        # File does not match anything in lookup table; reject
        if not matched:
            rejmsg.append("%s: unexpected source file '%s'" % (dsc_filename, f))

    # Check for multiple files
    for file_type in ('orig_tar', 'native_tar', 'debian_tar', 'debian_diff'):
        if has[file_type] > 1:
            rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))

    # Source format specific tests
    try:
        format = get_format_from_string(dsc['format'])
        rejmsg.extend([
            '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)
        ])
    except UnknownFormatError:
        # Not an error here for now
        pass

    return rejmsg
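
# Illustrative expectations for the lookup table above: a "3.0 (quilt)"
# upload lists exactly one orig tarball and one debian tarball, e.g.
#
#   example_1.0.orig.tar.gz      -> has['orig_tar'] bumped
#   example_1.0-1.debian.tar.xz  -> has['debian_tar'] bumped
#
# while a native "1.0" upload lists a single .tar.gz; anything over these
# counts, or a file matching nothing above, ends up in rejmsg.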

################################################################################

def check_hash_fields(what, manifest):
    """
    check_hash_fields ensures that there are no checksum fields in the
    given dict that we do not know about.
    """

    rejmsg = []
    hashes = map(lambda x: x[0], known_hashes)
    for field in manifest:
        if field.startswith("checksums-"):
            hashname = field.split("-", 1)[1]
            if hashname not in hashes:
                rejmsg.append("Unsupported checksum field for %s "\
                    "in %s" % (hashname, what))
    return rejmsg

################################################################################

def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
    if format >= version:
        # The version should contain the specified hash.
        func = check_hash

        # Import hashes from the changes
        rejmsg = parse_checksums(".changes", files, changes, hashname)
        if len(rejmsg) > 0:
            return rejmsg
    else:
        # We need to calculate the hash because it can't possibly
        # be in the file.
        func = create_hash
    return func(".changes", files, hashname, hashfunc)

# We could add the orig which might be in the pool to the files dict to
# access the checksums easily.

def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
    """
    ensure_dsc_hashes' task is to ensure that each and every *present* hash
    in the dsc is correct, i.e. identical to the changes file and if necessary
    the pool.  The latter task is delegated to check_hash.
    """

    rejmsg = []
    if not dsc.has_key('Checksums-%s' % (hashname,)):
        return rejmsg
    # Import hashes from the dsc
    parse_checksums(".dsc", dsc_files, dsc, hashname)
    # And check it...
    rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
    return rejmsg

################################################################################

def parse_checksums(where, files, manifest, hashname):
    rejmsg = []
    field = 'checksums-%s' % hashname
    if not field in manifest:
        return rejmsg
    for line in manifest[field].split('\n'):
        if not line:
            break
        clist = line.strip().split(' ')
        if len(clist) == 3:
            checksum, size, checkfile = clist
        else:
            rejmsg.append("Cannot parse checksum line [%s]" % (line))
            continue
        if not files.has_key(checkfile):
            # TODO: check for the file's entry in the original files dict, not
            # the one modified by (auto)byhand and other weird stuff
            # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
            #     (file, hashname, where))
            continue
        if not files[checkfile]["size"] == size:
            rejmsg.append("%s: size differs for files and checksums-%s entry "\
                "in %s" % (checkfile, hashname, where))
            continue
        files[checkfile][hash_key(hashname)] = checksum
    for f in files.keys():
        if not files[f].has_key(hash_key(hashname)):
            rejmsg.append("%s: no entry in checksums-%s in %s" % (f, hashname, where))
    return rejmsg
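
# The manifest field parsed above holds one "checksum size filename" triple
# per line, e.g. (illustrative, values made up):
#
#   Checksums-Sha256:
#    4355a46b19d348dc2f57c046f8ef63d4538ebb93... 821 example_1.0-1.dsc
#    53c234e5e8472b6ac51c1ae1cab3fe06fad053be... 2048 example_1.0.orig.tar.gz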

################################################################################

# Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl

def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
    files = {}

    # Make sure we have a Files: field to parse...
    if not changes.has_key(field):
        raise NoFilesFieldError

    # Validate .changes Format: field
    if not is_a_dsc:
        validate_changes_format(parse_format(changes['format']), field)

    includes_section = (not is_a_dsc) and field == "files"

    # Parse each entry/line:
    for i in changes[field].split('\n'):
        if not i:
            break
        s = i.split()
        section = priority = ""
        try:
            if includes_section:
                (md5, size, section, priority, name) = s
            else:
                (md5, size, name) = s
        except ValueError:
            raise ParseChangesError(i)

        if section == "":
            section = "-"
        if priority == "":
            priority = "-"

        (section, component) = extract_component_from_section(section)

        files[name] = dict(size=size, section=section,
                           priority=priority, component=component)
        files[name][hashname] = md5

    return files

################################################################################

# see http://bugs.debian.org/619131
def build_package_list(dsc, session = None):
    if not dsc.has_key("package-list"):
        return {}

    packages = {}

    for line in dsc["package-list"].split("\n"):
        if not line:
            break

        fields = line.split()
        name = fields[0]
        package_type = fields[1]
        (section, component) = extract_component_from_section(fields[2])
        priority = fields[3]

        # Validate type if we have a session
        if session and get_override_type(package_type, session) is None:
            # Maybe just warn and ignore? exit(1) might be a bit hard...
            fubar("invalid type (%s) in Package-List." % (package_type))

        if name not in packages or packages[name]["type"] == "dsc":
            packages[name] = dict(priority=priority, section=section, type=package_type, component=component, files=[])

    return packages

################################################################################

def send_mail (message, filename=""):
    """sendmail wrapper, takes _either_ a message string or a file as arguments"""

    maildir = Cnf.get('Dir::Mail')
    if maildir:
        path = os.path.join(maildir, datetime.datetime.now().isoformat())
        path = find_next_free(path)
        fh = open(path, 'w')
        fh.write(message)
        fh.close()

    # Check whether we're supposed to be sending mail
    if Cnf.has_key("Dinstall::Options::No-Mail") and Cnf["Dinstall::Options::No-Mail"]:
        return

    # If we've been passed a string dump it into a temporary file
    if message:
        (fd, filename) = tempfile.mkstemp()
        os.write(fd, message)
        os.close(fd)

    if Cnf.has_key("Dinstall::MailWhiteList") and \
       Cnf["Dinstall::MailWhiteList"] != "":
        message_in = open_file(filename)
        message_raw = modemail.message_from_file(message_in)
        message_in.close()

        whitelist = []
        whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
        try:
            for line in whitelist_in:
                if not re_whitespace_comment.match(line):
                    if re_re_mark.match(line):
                        whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
                    else:
                        whitelist.append(re.compile(re.escape(line.strip())))
        finally:
            whitelist_in.close()

        # Fields to check.
        fields = ["To", "Bcc", "Cc"]
        for field in fields:
            value = message_raw.get(field, None)
            if value != None:
                match = []
                for item in value.split(","):
                    (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
                    mail_whitelisted = 0
                    for wr in whitelist:
                        if wr.match(email):
                            mail_whitelisted = 1
                            break
                    if not mail_whitelisted:
                        print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])
                        continue
                    match.append(item)

                # Doesn't have any mail in whitelist so remove the header
                if len(match) == 0:
                    del message_raw[field]
                else:
                    message_raw.replace_header(field, ', '.join(match))

        # Change message fields in order if we don't have a To header
        if not message_raw.has_key("To"):
            fields.reverse()
            for field in fields:
                if message_raw.has_key(field):
                    message_raw[fields[-1]] = message_raw[field]
                    del message_raw[field]
                    break
            else:
                # Clean up any temporary files
                # and return, as we removed all recipients.
                if message:
                    os.unlink(filename)
                return

        fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0o700)
        os.write(fd, message_raw.as_string(True))
        os.close(fd)

    # Invoke sendmail
    (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
    if (result != 0):
        raise SendmailFailedError(output)

    # Clean up any temporary files
    if message:
        os.unlink(filename)

################################################################################

def poolify (source, component=None):
    if source[:3] == "lib":
        return source[:4] + '/' + source + '/'
    else:
        return source[:1] + '/' + source + '/'
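
# Examples:
#
#   >>> poolify("hello")
#   'h/hello/'
#   >>> poolify("libexample")
#   'libe/libexample/'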

################################################################################

def move (src, dest, overwrite = 0, perms = 0o664):
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = dest
    else:
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        umask = os.umask(0o000)
        os.makedirs(dest_dir, 0o2775)
        os.umask(umask)
    #print "Moving %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
        if not overwrite:
            fubar("Can't move %s to %s - file already exists." % (src, dest))
        else:
            if not os.access(dest, os.W_OK):
                fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
    os.unlink(src)

def copy (src, dest, overwrite = 0, perms = 0o664):
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = dest
    else:
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        umask = os.umask(0o000)
        os.makedirs(dest_dir, 0o2775)
        os.umask(umask)
    #print "Copying %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
        if not overwrite:
            raise FileExistsError
        else:
            if not os.access(dest, os.W_OK):
                raise CantOverwriteError
    shutil.copy2(src, dest)
    os.chmod(dest, perms)

################################################################################

def where_am_i ():
    res = socket.getfqdn()
    database_hostname = Cnf.get("Config::" + res + "::DatabaseHostname")
    if database_hostname:
        return database_hostname
    else:
        return res

def which_conf_file ():
    if os.getenv('DAK_CONFIG'):
        return os.getenv('DAK_CONFIG')

    res = socket.getfqdn()
    # In case we allow local config files per user, try if one exists
    if Cnf.find_b("Config::" + res + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # note: a leading slash here would make os.path.join discard homedir
        confpath = os.path.join(homedir, "etc/dak.conf")
        if os.path.exists(confpath):
            apt_pkg.ReadConfigFileISC(Cnf, confpath)

    # We are still in here, so there is no local config file or we do
    # not allow local files.  Do the normal stuff.
    if Cnf.get("Config::" + res + "::DakConfig"):
        return Cnf["Config::" + res + "::DakConfig"]

    return default_config

def which_apt_conf_file ():
    res = socket.getfqdn()
    # In case we allow local config files per user, try if one exists
    if Cnf.find_b("Config::" + res + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        confpath = os.path.join(homedir, "etc/dak.conf")
        if os.path.exists(confpath):
            apt_pkg.ReadConfigFileISC(Cnf, confpath)

    if Cnf.get("Config::" + res + "::AptConfig"):
        return Cnf["Config::" + res + "::AptConfig"]

    return default_apt_config

def which_alias_file():
    hostname = socket.getfqdn()
    aliasfn = '/var/lib/misc/' + hostname + '/forward-alias'
    if os.path.exists(aliasfn):
        return aliasfn
    else:
        return None

################################################################################

def TemplateSubst(subst_map, filename):
    """ Perform a substitution of template """
    templatefile = open_file(filename)
    template = templatefile.read()
    for k, v in subst_map.iteritems():
        template = template.replace(k, str(v))
    templatefile.close()
    return template

################################################################################

def fubar(msg, exit_code=1):
    sys.stderr.write("E: %s\n" % (msg))
    sys.exit(exit_code)

def warn(msg):
    sys.stderr.write("W: %s\n" % (msg))

################################################################################

# Returns the user name with a laughable attempt at rfc822 conformancy
# (read: removing stray periods).
def whoami ():
    return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')

def getusername ():
    return pwd.getpwuid(os.getuid())[0]

################################################################################

def size_type (c):
    t = " B"
    if c > 10240:
        c = c / 1024
        t = " KB"
    if c > 10240:
        c = c / 1024
        t = " MB"
    return ("%d%s" % (c, t))

################################################################################

def cc_fix_changes (changes):
    o = changes.get("architecture", "")
    if o != "":
        del changes["architecture"]
    changes["architecture"] = {}
    for j in o.split():
        changes["architecture"][j] = 1

def changes_compare (a, b):
    """ Sort by source name, source version, 'have source', and then by filename """
    try:
        a_changes = parse_changes(a)
    except:
        return -1

    try:
        b_changes = parse_changes(b)
    except:
        return 1

    cc_fix_changes (a_changes)
    cc_fix_changes (b_changes)

    # Sort by source name
    a_source = a_changes.get("source")
    b_source = b_changes.get("source")
    q = cmp (a_source, b_source)
    if q:
        return q

    # Sort by source version
    a_version = a_changes.get("version", "0")
    b_version = b_changes.get("version", "0")
    q = apt_pkg.version_compare(a_version, b_version)
    if q:
        return q

    # Sort by 'have source'
    a_has_source = a_changes["architecture"].get("source")
    b_has_source = b_changes["architecture"].get("source")
    if a_has_source and not b_has_source:
        return -1
    elif b_has_source and not a_has_source:
        return 1

    # Fall back to sort by filename
    return cmp(a, b)

################################################################################

def find_next_free (dest, too_many=100):
    extra = 0
    orig_dest = dest
    while os.path.exists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
        extra += 1
    if extra >= too_many:
        raise NoFreeFilenameError
    return dest
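
# Usage sketch: pick a fresh name next to an existing one; if "/srv/mail/msg"
# exists this returns "/srv/mail/msg.0", then "/srv/mail/msg.1" and so on,
# raising NoFreeFilenameError after too_many attempts:
#
#   path = find_next_free("/srv/mail/msg")   # hypothetical path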

################################################################################

def result_join (original, sep = '\t'):
    resultlist = []
    for i in xrange(len(original)):
        if original[i] == None:
            resultlist.append("")
        else:
            resultlist.append(original[i])
    return sep.join(resultlist)

################################################################################

def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    out = ""
    for line in str.split('\n'):
        line = line.strip()
        if line or include_blank_lines:
            out += "%s%s\n" % (prefix, line)
    # Strip trailing new line
    if out:
        out = out[:-1]
    return out

################################################################################

def validate_changes_file_arg(filename, require_changes=1):
    """
    'filename' is either a .changes or .dak file.  If 'filename' is a
    .dak file, it's changed to be the corresponding .changes file.  The
    function then checks if the .changes file a) exists and b) is
    readable and returns the .changes filename if so.  If there's a
    problem, the next action depends on the option 'require_changes'
    argument:

      - If 'require_changes' == -1, errors are ignored and the .changes
        filename is returned.
      - If 'require_changes' == 0, a warning is given and 'None' is returned.
      - If 'require_changes' == 1, a fatal error is raised.
    """
    error = None

    orig_filename = filename
    if filename.endswith(".dak"):
        filename = filename[:-4] + ".changes"

    if not filename.endswith(".changes"):
        error = "invalid file type; not a changes file"
    else:
        if not os.access(filename, os.R_OK):
            if os.path.exists(filename):
                error = "permission denied"
            else:
                error = "file not found"

    if error:
        if require_changes == 1:
            fubar("%s: %s." % (orig_filename, error))
        elif require_changes == 0:
            warn("Skipping %s - %s" % (orig_filename, error))
            return None
        else: # We only care about the .dak file
            return filename
    else:
        return filename

################################################################################

def real_arch(arch):
    return (arch != "source" and arch != "all")

################################################################################

def join_with_commas_and(list):
    if len(list) == 0: return "nothing"
    if len(list) == 1: return list[0]
    return ", ".join(list[:-1]) + " and " + list[-1]
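
# Examples:
#
#   >>> join_with_commas_and([])
#   'nothing'
#   >>> join_with_commas_and(["a"])
#   'a'
#   >>> join_with_commas_and(["a", "b", "c"])
#   'a, b and c'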

################################################################################

def pp_deps (deps):
    pp_deps = []
    for atom in deps:
        (pkg, version, constraint) = atom
        if constraint:
            pp_dep = "%s (%s %s)" % (pkg, constraint, version)
        else:
            pp_dep = pkg
        pp_deps.append(pp_dep)
    return " |".join(pp_deps)

################################################################################

def get_conf():
    return Cnf

################################################################################

def parse_args(Options):
    """ Handle -a, -c and -s arguments; returns them as SQL constraints """
    # XXX: This should go away and everything which calls it be converted
    #      to use SQLA properly.  For now, we'll just fix it not to use
    #      the old Pg interface though
    session = DBConn().session()
    # Process suite
    if Options["Suite"]:
        suite_ids_list = []
        for suitename in split_args(Options["Suite"]):
            suite = get_suite(suitename, session=session)
            if not suite or suite.suite_id is None:
                warn("suite '%s' not recognised." % (suite and suite.suite_name or suitename))
            else:
                suite_ids_list.append(suite.suite_id)
        if suite_ids_list:
            con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
        else:
            fubar("No valid suite given.")
    else:
        con_suites = ""

    # Process component
    if Options["Component"]:
        component_ids_list = []
        for componentname in split_args(Options["Component"]):
            component = get_component(componentname, session=session)
            if component is None:
                warn("component '%s' not recognised." % (componentname))
            else:
                component_ids_list.append(component.component_id)
        if component_ids_list:
            con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
        else:
            fubar("No valid component given.")
    else:
        con_components = ""

    # Process architecture
    con_architectures = ""
    check_source = 0
    if Options["Architecture"]:
        arch_ids_list = []
        for archname in split_args(Options["Architecture"]):
            if archname == "source":
                check_source = 1
            else:
                arch = get_architecture(archname, session=session)
                if arch is None:
                    warn("architecture '%s' not recognised." % (archname))
                else:
                    arch_ids_list.append(arch.arch_id)
        if arch_ids_list:
            con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
        else:
            if not check_source:
                fubar("No valid architecture given.")
    else:
        check_source = 1

    return (con_suites, con_architectures, con_components, check_source)

################################################################################

def arch_compare_sw (a, b):
    """
    Function for use in sorting lists of architectures.

    Sorts normally except that 'source' dominates all others.
    """

    if a == "source" and b == "source":
        return 0
    elif a == "source":
        return -1
    elif b == "source":
        return 1
    return cmp(a, b)

################################################################################

def split_args (s, dwim=1):
    """
    Split command line arguments which can be separated by either commas
    or whitespace.  If dwim is set, it will complain about a string ending
    in a comma since this usually means someone did 'dak ls -a i386, m68k
    foo' or something and the inevitable confusion resulting from 'm68k'
    being treated as an argument is undesirable.
    """

    if s.find(",") == -1:
        return s.split()
    else:
        if s[-1:] == "," and dwim:
            fubar("split_args: found trailing comma, spurious space maybe?")
        return s.split(",")
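
# Examples:
#
#   >>> split_args("i386 m68k")
#   ['i386', 'm68k']
#   >>> split_args("i386,m68k")
#   ['i386', 'm68k']
#
# split_args("i386,") aborts via fubar(): a trailing comma usually means the
# shell split 'dak ls -a i386, m68k' into separate arguments.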

################################################################################

def gpgv_get_status_output(cmd, status_read, status_write):
    """
    Our very own version of commands.getstatusoutput(), hacked to support
    gpgv's status fd.
    """

    cmd = ['/bin/sh', '-c', cmd]
    p2cread, p2cwrite = os.pipe()
    c2pread, c2pwrite = os.pipe()
    errout, errin = os.pipe()
    pid = os.fork()
    if pid == 0:
        # Child: wire stdio to the pipes, close everything else except the
        # status fd, then exec the command
        os.dup2(p2cread, 0)
        os.dup2(c2pwrite, 1)
        os.dup2(errin, 2)
        for i in range(3, 256):
            if i != status_write:
                try:
                    os.close(i)
                except:
                    pass
        try:
            os.execvp(cmd[0], cmd)
        finally:
            os._exit(1)

    # Parent
    os.close(p2cread)
    os.dup2(c2pread, c2pwrite)
    os.dup2(errout, errin)

    output = status = ""
    while 1:
        i, o, e = select.select([c2pwrite, errin, status_read], [], [])
        more_data = []
        for fd in i:
            r = os.read(fd, 8196)
            if len(r) > 0:
                more_data.append(fd)
                if fd == c2pwrite or fd == errin:
                    output += r
                elif fd == status_read:
                    status += r
                else:
                    fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
        if not more_data:
            pid, exit_status = os.waitpid(pid, 0)
            os.close(status_write)
            os.close(status_read)
            break

    return output, status, exit_status

################################################################################

def process_gpgv_output(status):
    # Process the status-fd output
    keywords = {}
    internal_error = ""
    for line in status.split('\n'):
        line = line.strip()
        if line == "":
            continue
        split = line.split()
        if len(split) < 2:
            internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
            continue
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
            continue
        args = split[2:]
        if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
            continue
        else:
            keywords[keyword] = args

    return (keywords, internal_error)

################################################################################

def retrieve_key (filename, keyserver=None, keyring=None):
    """
    Retrieve the key that signed 'filename' from 'keyserver' and
    add it to 'keyring'.  Returns nothing on success, or an error message
    on error.
    """

    # Defaults for keyserver and keyring
    if not keyserver:
        keyserver = Cnf["Dinstall::KeyServer"]
    if not keyring:
        keyring = get_primary_keyring_path()

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(filename):
        return "%s: tainted filename" % (filename)

    # Invoke gpgv on the file
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
    (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)
    if internal_error:
        return internal_error

    if not keywords.has_key("NO_PUBKEY"):
        return "didn't find expected NO_PUBKEY in gpgv status-fd output"

    fingerprint = keywords["NO_PUBKEY"][0]
    # XXX - gpg sucks.  You can't use --secret-keyring=/dev/null as
    # it'll try to create a lockfile in /dev.  A better solution might
    # be a tempfile or something.
    cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
          % (Cnf["Dinstall::SigningKeyring"])
    cmd += " --keyring %s --keyserver %s --recv-key %s" \
           % (keyring, keyserver, fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    if (result != 0):
        return "'%s' failed with exit code %s" % (cmd, result)

    return ""

################################################################################

def gpg_keyring_args(keyrings=None):
    if not keyrings:
        keyrings = get_active_keyring_paths()

    return " ".join(["--keyring %s" % x for x in keyrings])

################################################################################

def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None, session=None):
    """
    Check the signature of a file and return the fingerprint if the
    signature is valid or 'None' if it's not.  The first argument is the
    filename whose signature should be checked.  The second argument is
    optional and is the name of the file the detached signature applies
    to.  The third argument is an optional *list* of keyrings to use.
    'autofetch' can either be None, True or False.  If None, the default
    behaviour specified in the config will be used.

    Returns a tuple of (fingerprint, list of reject messages); on failure
    the fingerprint is None and the list explains why.
    """

    rejects = []

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(sig_filename):
        rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
        return (None, rejects)

    if data_filename and not re_taint_free.match(data_filename):
        rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
        return (None, rejects)

    if not keyrings:
        keyrings = [ x.keyring_name for x in session.query(Keyring).filter(Keyring.active == True).all() ]

    # Autofetch the signing key if that's enabled
    if autofetch == None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    if autofetch:
        error_msg = retrieve_key(sig_filename)
        if error_msg:
            rejects.append(error_msg)
            return (None, rejects)

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    if internal_error:
        rejects.append("internal error while performing signature check on %s." % (sig_filename))
        rejects.append(internal_error)
        rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.")
        return (None, rejects)

    # Now check for obviously bad things in the processed output
    if keywords.has_key("KEYREVOKED"):
        rejects.append("The key used to sign %s has been revoked." % (sig_filename))
    if keywords.has_key("BADSIG"):
        rejects.append("bad signature on %s." % (sig_filename))
    if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
        rejects.append("failed to check signature on %s." % (sig_filename))
    if keywords.has_key("NO_PUBKEY"):
        args = keywords["NO_PUBKEY"]
        key = None
        if len(args) >= 1:
            key = args[0]
        rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if keywords.has_key("BADARMOR"):
        rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if keywords.has_key("NODATA"):
        rejects.append("no signature found in %s." % (sig_filename))
    if keywords.has_key("EXPKEYSIG"):
        args = keywords["EXPKEYSIG"]
        key = None
        if len(args) >= 1:
            key = args[0]
        rejects.append("Signature made by expired key 0x%s" % (key))
    if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
        args = keywords["KEYEXPIRED"]
        expiredate = ""
        if len(args) >= 1:
            timestamp = args[0]
            if timestamp.count("T") == 0:
                try:
                    expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
                except ValueError:
                    expiredate = "unknown (%s)" % (timestamp)
            else:
                expiredate = timestamp
        rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))

    if len(rejects) > 0:
        return (None, rejects)

    # Next check gpgv exited with a zero return code
    if exit_status:
        rejects.append("gpgv failed while checking %s." % (sig_filename))
        if status.strip():
            rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "))
        else:
            rejects.append(prefix_multi_line_string(output, " [GPG output:] "))
        return (None, rejects)

    # Sanity check the good stuff we expect
    if not keywords.has_key("VALIDSIG"):
        rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
    else:
        args = keywords["VALIDSIG"]
        if len(args) < 1:
            rejects.append("internal error while checking signature on %s." % (sig_filename))
        else:
            fingerprint = args[0]
    if not keywords.has_key("GOODSIG"):
        rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if not keywords.has_key("SIG_ID"):
        rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

    # Finally ensure there's not something we don't recognise
    known_keywords = dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
                          NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="",POLICY_URL="")

    for keyword in keywords.keys():
        if not known_keywords.has_key(keyword):
            rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))

    if len(rejects) > 0:
        return (None, rejects)

    return (fingerprint, [])

################################################################################

def gpg_get_key_addresses(fingerprint):
    """retrieve email addresses from gpg key uids for a given fingerprint"""
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses != None:
        return addresses
    addresses = []
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
          % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    if result == 0:
        for l in output.split('\n'):
            m = re_gpg_uid.match(l)
            if not m:
                continue
            address = m.group(1)
            if address.endswith('@debian.org'):
                # prefer @debian.org addresses
                # TODO: maybe not hardcode the domain
                addresses.insert(0, address)
            else:
                addresses.append(m.group(1))
    key_uid_email_cache[fingerprint] = addresses
    return addresses

################################################################################

def get_logins_from_ldap(fingerprint='*'):
    """retrieve login from LDAP linked to a given fingerprint"""

    import ldap  # imported lazily; most callers never touch LDAP
    LDAPDn = Cnf['Import-LDAP-Fingerprints::LDAPDn']
    LDAPServer = Cnf['Import-LDAP-Fingerprints::LDAPServer']
    l = ldap.open(LDAPServer)
    l.simple_bind_s('','')
    Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
                       '(keyfingerprint=%s)' % fingerprint,
                       ['uid', 'keyfingerprint'])
    login = {}
    for elem in Attrs:
        login[elem[1]['keyFingerPrint'][0]] = elem[1]['uid'][0]
    return login

################################################################################

def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
    Returns fixed 'src'.
    """
    src = src.replace(root, '', 1)
    dest = dest.replace(root, '', 1)
    dest = os.path.dirname(dest)
    new_src = '../' * len(dest.split('/'))
    return new_src + src
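
# Worked example (illustrative paths):
#
#   >>> clean_symlink("/srv/pool/main/h/hello/x.tar.gz",
#   ...               "/srv/dists/sid/x.tar.gz", "/srv/")
#   '../../pool/main/h/hello/x.tar.gz'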

################################################################################

def temp_filename(directory=None, prefix="dak", suffix="", mode=None, group=None):
    """
    Return a secure and unique filename by pre-creating it.

    @type directory: str
    @param directory: If non-null it will be the directory the file is pre-created in.

    @type prefix: str
    @param prefix: The filename will be prefixed with this string

    @type suffix: str
    @param suffix: The filename will end with this string

    @type mode: str
    @param mode: If set the file will get chmodded to those permissions

    @type group: str
    @param group: If set the file will get chgrped to the specified group.

    @rtype: tuple
    @return: Returns a pair (fd, name)
    """

    (tfd, tfname) = tempfile.mkstemp(suffix, prefix, directory)
    if mode:
        os.chmod(tfname, mode)
    if group:
        gid = grp.getgrnam(group).gr_gid
        os.chown(tfname, -1, gid)
    return (tfd, tfname)

################################################################################

def temp_dirname(parent=None, prefix="dak", suffix="", mode=None, group=None):
    """
    Return a secure and unique directory by pre-creating it.

    @param parent: If non-null it will be the directory the directory is pre-created in.

    @param prefix: The filename will be prefixed with this string

    @param suffix: The filename will end with this string

    @param mode: If set the file will get chmodded to those permissions

    @param group: If set the file will get chgrped to the specified group.

    @return: Returns the name of the pre-created directory
    """

    tfname = tempfile.mkdtemp(suffix, prefix, parent)
    if mode:
        os.chmod(tfname, mode)
    if group:
        # resolve the group name to a gid; os.chown() needs a numeric id
        gid = grp.getgrnam(group).gr_gid
        os.chown(tfname, -1, gid)
    return tfname

################################################################################

def is_email_alias(email):
    """ checks if the user part of the email is listed in the alias file """
    global alias_cache
    if alias_cache == None:
        aliasfn = which_alias_file()
        alias_cache = set()
        if aliasfn:
            for l in open(aliasfn):
                alias_cache.add(l.split(':')[0])
    uid = email.split('@')[0]
    return uid in alias_cache

################################################################################

def get_changes_files(from_dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place).

    Returns a list of filenames
    """
    try:
        # Much of the rest of p-u/p-a depends on being in the right place
        os.chdir(from_dir)
        changes_files = [x for x in os.listdir(from_dir) if x.endswith('.changes')]
    except OSError as e:
        fubar("Failed to read list from directory %s (%s)" % (from_dir, e))

    return changes_files

################################################################################

Cnf = apt_pkg.Configuration()
if not os.getenv("DAK_TEST"):
    apt_pkg.read_config_file_isc(Cnf, default_config)

if which_conf_file() != default_config:
    apt_pkg.read_config_file_isc(Cnf, which_conf_file())

################################################################################

def parse_wnpp_bug_file(file = "/srv/ftp-master.debian.org/scripts/masterfiles/wnpp_rm"):
    """
    Parses the wnpp bug list available at http://qa.debian.org/data/bts/wnpp_rm
    Well, actually it parses a local copy, but let's document the source
    somewhere ;)

    returns a dict associating source package name with a list of open wnpp
    bugs (Yes, there might be more than one)
    """

    try:
        f = open(file)
        lines = f.readlines()
    except IOError as e:
        print "Warning: Couldn't open %s; don't know about WNPP bugs, so won't close any." % file
        lines = []
    wnpp = {}

    for line in lines:
        split_line = line.split(": ", 1)
        if len(split_line) > 1:
            wnpp[split_line[0]] = split_line[1].split("|")

    for source in wnpp.keys():
        bugs = []
        for wnpp_bug in wnpp[source]:
            bug_no = re.search(r"(\d+)", wnpp_bug).group()
            if bug_no:
                bugs.append(bug_no)
        wnpp[source] = bugs
    return wnpp

################################################################################

def get_packages_from_ftp(root, suite, component, architecture):
    """
    Returns an object containing apt_pkg-parseable data collected by
    aggregating Packages.gz files gathered for each architecture.

    @type root: string
    @param root: path to ftp archive root directory

    @type suite: string
    @param suite: suite to extract files from

    @type component: string
    @param component: component to extract files from

    @type architecture: string
    @param architecture: architecture to extract files from

    @rtype: TagFile
    @return: apt_pkg class containing package data
    """
    filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (root, suite, component, architecture)
    (fd, temp_file) = temp_filename()
    (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_file))
    if (result != 0):
        fubar("Gunzip invocation failed!\n%s\n" % (output), result)
    filename = "%s/dists/%s/%s/debian-installer/binary-%s/Packages.gz" % (root, suite, component, architecture)
    if os.path.exists(filename):
        (result, output) = commands.getstatusoutput("gunzip -c %s >> %s" % (filename, temp_file))
        if (result != 0):
            fubar("Gunzip invocation failed!\n%s\n" % (output), result)
    packages = open_file(temp_file)
    Packages = apt_pkg.ParseTagFile(packages)
    os.unlink(temp_file)
    return Packages

################################################################################

def deb_extract_control(fh):
    """extract DEBIAN/control from a binary package"""
    return apt_inst.DebFile(fh).control.extractdata("control")

################################################################################

def mail_addresses_for_upload(maintainer, changed_by, fingerprint):
    """mail addresses to contact for an upload

    @type maintainer: str
    @param maintainer: Maintainer field of the .changes file

    @type changed_by: str
    @param changed_by: Changed-By field of the .changes file

    @type fingerprint: str
    @param fingerprint: fingerprint of the key used to sign the upload

    @rtype: list of str
    @return: list of RFC 2047-encoded mail addresses to contact regarding
             this upload
    """
    addresses = [maintainer]
    if changed_by != maintainer:
        addresses.append(changed_by)

    fpr_addresses = gpg_get_key_addresses(fingerprint)
    if len(fpr_addresses) > 0 and fix_maintainer(changed_by)[3] not in fpr_addresses and fix_maintainer(maintainer)[3] not in fpr_addresses:
        addresses.append(fpr_addresses[0])

    encoded_addresses = [ fix_maintainer(e)[1] for e in addresses ]
    return encoded_addresses
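
# Usage sketch (hypothetical values): returns the RFC 2047-encoded Maintainer
# and Changed-By addresses, plus the signing key's first uid address when
# neither field already matches the key:
#
#   addrs = mail_addresses_for_upload("Maint <maint@example.org>",
#                                     "Dev <dev@example.org>",
#                                     "0123456789ABCDEF0123456789ABCDEF01234567")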

################################################################################

def call_editor(text="", suffix=".txt"):
    """run editor and return the result as a string

    @type text: str
    @param text: initial text

    @type suffix: str
    @param suffix: extension for temporary file

    @rtype: str
    @return: string with the edited text
    """
    editor = os.environ.get('VISUAL', os.environ.get('EDITOR', 'vi'))
    tmp = tempfile.NamedTemporaryFile(suffix=suffix, delete=False)
    try:
        tmp.write(text)
        tmp.close()
        subprocess.check_call([editor, tmp.name])
        return open(tmp.name, 'r').read()
    finally:
        os.unlink(tmp.name)

################################################################################

def check_reverse_depends(removals, suite, arches=None, session=None, cruft=False):
    dbsuite = get_suite(suite, session)
    overridesuite = dbsuite
    if dbsuite.overridesuite is not None:
        overridesuite = get_suite(dbsuite.overridesuite, session)
    dep_problem = 0
    p2c = {}
    all_broken = {}
    if arches:
        all_arches = set(arches)
    else:
        all_arches = set([x.arch_string for x in get_suite_architectures(suite)])
    all_arches -= set(["source", "all"])
    metakey_d = get_or_set_metadatakey("Depends", session)
    metakey_p = get_or_set_metadatakey("Provides", session)
    params = {
        'suite_id':     dbsuite.suite_id,
        'metakey_d_id': metakey_d.key_id,
        'metakey_p_id': metakey_p.key_id,
    }
    for architecture in all_arches | set(['all']):
        deps = {}
        sources = {}
        virtual_packages = {}
        params['arch_id'] = get_architecture(architecture, session).arch_id

        statement = '''
            SELECT b.id, b.package, s.source, c.name as component,
                (SELECT bmd.value FROM binaries_metadata bmd WHERE bmd.bin_id = b.id AND bmd.key_id = :metakey_d_id) AS depends,
                (SELECT bmp.value FROM binaries_metadata bmp WHERE bmp.bin_id = b.id AND bmp.key_id = :metakey_p_id) AS provides
                FROM binaries b
                JOIN bin_associations ba ON b.id = ba.bin AND ba.suite = :suite_id
                JOIN source s ON b.source = s.id
                JOIN files_archive_map af ON b.file = af.file_id
                JOIN component c ON af.component_id = c.id
                WHERE b.architecture = :arch_id'''
        query = session.query('id', 'package', 'source', 'component', 'depends', 'provides'). \
            from_statement(statement).params(params)
        for binary_id, package, source, component, depends, provides in query:
            sources[package] = source
            p2c[package] = component
            if depends is not None:
                deps[package] = depends
            # Maintain a counter for each virtual package.  If a
            # Provides: exists, set the counter to 0 and count all
            # provides by a package not in the list for removal.
            # If the counter stays 0 at the end, we know that only
            # the to-be-removed packages provided this virtual
            # package.
            if provides is not None:
                for virtual_pkg in provides.split(","):
                    virtual_pkg = virtual_pkg.strip()
                    if virtual_pkg == package: continue
                    if not virtual_packages.has_key(virtual_pkg):
                        virtual_packages[virtual_pkg] = 0
                    if package not in removals:
                        virtual_packages[virtual_pkg] += 1

        # If a virtual package is only provided by the to-be-removed
        # packages, treat the virtual package as to-be-removed too.
        for virtual_pkg in virtual_packages.keys():
            if virtual_packages[virtual_pkg] == 0:
                removals.append(virtual_pkg)

        # Check binary dependencies (Depends)
        for package in deps.keys():
            if package in removals: continue
            parsed_dep = []
            try:
                parsed_dep += apt_pkg.ParseDepends(deps[package])
            except ValueError as e:
                print "Error for package %s: %s" % (package, e)
            for dep in parsed_dep:
                # Check for partial breakage.  If a package has a ORed
                # dependency, there is only a dependency problem if all
                # packages in the ORed depends will be removed.
                unsat = 0
                for dep_package, _, _ in dep:
                    if dep_package in removals:
                        unsat += 1
                if unsat == len(dep):
                    component = p2c[package]
                    source = sources[package]
                    if component != "main":
                        source = "%s/%s" % (source, component)
                    all_broken.setdefault(source, {}).setdefault(package, set()).add(architecture)
                    dep_problem = 1

    if all_broken:
        if cruft:
            print " - broken Depends:"
        else:
            print "# Broken Depends:"
        for source, bindict in sorted(all_broken.items()):
            lines = []
            for binary, arches in sorted(bindict.items()):
                if arches == all_arches or 'all' in arches:
                    lines.append(binary)
                else:
                    lines.append('%s [%s]' % (binary, ' '.join(sorted(arches))))
            if cruft:
                print ' %s: %s' % (source, lines[0])
            else:
                print '%s: %s' % (source, lines[0])
            for line in lines[1:]:
                if cruft:
                    print ' ' + ' ' * (len(source) + 2) + line
                else:
                    print ' ' * (len(source) + 2) + line
        if not cruft:
            print

    # Check source dependencies (Build-Depends and Build-Depends-Indep)
    all_broken = {}
    metakey_bd = get_or_set_metadatakey("Build-Depends", session)
    metakey_bdi = get_or_set_metadatakey("Build-Depends-Indep", session)
    params = {
        'suite_id':    dbsuite.suite_id,
        'metakey_ids': (metakey_bd.key_id, metakey_bdi.key_id),
    }
    statement = '''
        SELECT s.id, s.source, string_agg(sm.value, ', ') as build_dep
           FROM source s
           JOIN source_metadata sm ON s.id = sm.src_id
           WHERE s.id in
               (SELECT source FROM src_associations
                   WHERE suite = :suite_id)
               AND sm.key_id in :metakey_ids
           GROUP BY s.id, s.source'''
    query = session.query('id', 'source', 'build_dep').from_statement(statement). \
        params(params)
    for source_id, source, build_dep in query:
        if source in removals: continue
        parsed_dep = []
        if build_dep is not None:
            # Remove [arch] information since we want to see breakage on all arches
            build_dep = re_build_dep_arch.sub("", build_dep)
            try:
                parsed_dep += apt_pkg.ParseDepends(build_dep)
            except ValueError as e:
                print "Error for source %s: %s" % (source, e)
        for dep in parsed_dep:
            unsat = 0
            for dep_package, _, _ in dep:
                if dep_package in removals:
                    unsat += 1
            if unsat == len(dep):
                component, = session.query(Component.component_name) \
                    .join(Component.overrides) \
                    .filter(Override.suite == overridesuite) \
                    .filter(Override.package == source) \
                    .join(Override.overridetype).filter(OverrideType.overridetype == 'dsc') \
                    .first()
                if component != "main":
                    source = "%s/%s" % (source, component)
                all_broken.setdefault(source, set()).add(pp_deps(dep))
                dep_problem = 1

    if all_broken:
        if cruft:
            print " - broken Build-Depends:"
        else:
            print "# Broken Build-Depends:"
        for source, bdeps in sorted(all_broken.items()):
            bdeps = sorted(bdeps)
            if cruft:
                print ' %s: %s' % (source, bdeps[0])
            else:
                print '%s: %s' % (source, bdeps[0])
            for bdep in bdeps[1:]:
                if cruft:
                    print ' ' + ' ' * (len(source) + 2) + bdep
                else:
                    print ' ' * (len(source) + 2) + bdep
        if not cruft:
            print

    return dep_problem