2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
42 import email as modemail
47 import daklib.config as config
48 import daklib.daksubprocess
49 from dbconn import DBConn, get_architecture, get_component, get_suite, \
50 get_override_type, Keyring, session_wrapper, \
51 get_active_keyring_paths, get_primary_keyring_path, \
52 get_suite_architectures, get_or_set_metadatakey, DBSource, \
53 Component, Override, OverrideType
54 from sqlalchemy import desc
55 from dak_exceptions import *
56 from gpg import SignedFile
57 from textutils import fix_maintainer
58 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
59 re_multi_line_field, re_srchasver, re_taint_free, \
60 re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource, \
61 re_is_orig_source, re_build_dep_arch
63 from formats import parse_format, validate_changes_format
64 from srcformats import get_format_from_string
65 from collections import defaultdict
67 ################################################################################
69 default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
71 alias_cache = None #: Cache for email alias checks
72 key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
74 # (hashname, function, earliest_changes_version)
75 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
76 ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
# Monkeypatch commands.getstatusoutput as it may not return the correct exit
# code in lenny's Python. This also affects commands.getoutput and
# commands.getstatus.
81 def dak_getstatusoutput(cmd):
82 pipe = daklib.daksubprocess.Popen(cmd, shell=True, universal_newlines=True,
83 stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
85 output = pipe.stdout.read()
    if output[-1:] == '\n':
        output = output[:-1]
    ret = pipe.wait()
    if ret is None:
        ret = 0
    return ret, output
97 commands.getstatusoutput = dak_getstatusoutput
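
# After this monkeypatch, callers elsewhere in this module keep using the
# stock commands API; e.g. (illustrative):
#   (result, output) = commands.getstatusoutput("dpkg --print-architecture")
# and 'result' now reliably carries the command's real exit status.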
99 ################################################################################
def html_escape(s):
    """ Escape HTML special characters in a string """
103 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
105 ################################################################################
107 def open_file(filename, mode='r'):
    Open C{filename}, return a file object.
111 @type filename: string
112 @param filename: path/filename to open
115 @param mode: open mode
118 @return: open fileobject
120 @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
    try:
        f = open(filename, mode)
    except IOError:
        raise CantOpenError(filename)
    return f
129 ################################################################################
131 def our_raw_input(prompt=""):
    if prompt:
        sys.stdout.write(prompt)
    sys.stdout.flush()
    try:
        return raw_input()
    except EOFError:
        sys.stderr.write("\nUser interrupt (^D).\n")
        raise SystemExit
147 ################################################################################
149 def extract_component_from_section(section, session=None):
    component = ""
    if section.find('/') != -1:
        component = section.split('/')[0]

    # Expand default component
    if component == "":
        comp = get_component(section, session)
        if comp is None:
            component = "main"
        else:
            component = comp.component_name
163 return (section, component)
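
# Examples (illustrative): a section of "contrib/net" yields
# ("contrib/net", "contrib"), while a bare "net" falls back to the
# default component: ("net", "main").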
165 ################################################################################
167 def parse_deb822(armored_contents, signing_rules=0, keyrings=None, session=None):
    require_signature = True
    if signing_rules == -1:
        require_signature = False
173 signed_file = SignedFile(armored_contents, keyrings=keyrings, require_signature=require_signature)
174 contents = signed_file.contents
179 # Split the lines in the input, keeping the linebreaks.
180 lines = contents.splitlines(True)
183 raise ParseChangesError("[Empty changes file]")
    # Reindex by line number so we can easily verify the format of
    # follow-up paragraphs.
    index = 0
    indexed_lines = {}
    for line in lines:
        index += 1
        indexed_lines[index] = line[:-1]
193 num_of_lines = len(indexed_lines.keys())
196 while index < num_of_lines:
198 line = indexed_lines[index]
199 if line == "" and signing_rules == 1:
200 if index != num_of_lines:
201 raise InvalidDscError(index)
203 slf = re_single_line_field.match(line)
205 field = slf.groups()[0].lower()
206 changes[field] = slf.groups()[1]
210 changes[field] += '\n'
212 mlf = re_multi_line_field.match(line)
215 raise ParseChangesError("'%s'\n [Multi-line field continuing on from nothing?]" % (line))
216 if first == 1 and changes[field] != "":
217 changes[field] += '\n'
219 changes[field] += mlf.groups()[0] + '\n'
223 changes["filecontents"] = armored_contents
225 if changes.has_key("source"):
226 # Strip the source version in brackets from the source field,
227 # put it in the "source-version" field instead.
228 srcver = re_srchasver.search(changes["source"])
230 changes["source"] = srcver.group(1)
231 changes["source-version"] = srcver.group(2)
234 raise ParseChangesError(error)
238 ################################################################################
240 def parse_changes(filename, signing_rules=0, dsc_file=0, keyrings=None):
242 Parses a changes file and returns a dictionary where each field is a
    key. The mandatory first argument is the filename of the .changes
    file.
246 signing_rules is an optional argument:
248 - If signing_rules == -1, no signature is required.
249 - If signing_rules == 0 (the default), a signature is required.
     - If signing_rules == 1, it turns on the same strict format checking
       as dpkg-source.

    The rules for (signing_rules == 1)-mode are:
255 - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
256 followed by any PGP header data and must end with a blank line.
258 - The data section must end with a blank line and must be followed by
259 "-----BEGIN PGP SIGNATURE-----".
262 changes_in = open_file(filename)
263 content = changes_in.read()
    try:
        unicode(content, 'utf-8')
    except UnicodeError:
        raise ChangesUnicodeError("Changes file not proper utf-8")
269 changes = parse_deb822(content, signing_rules, keyrings=keyrings)
273 # Finally ensure that everything needed for .changes is there
274 must_keywords = ('Format', 'Date', 'Source', 'Binary', 'Architecture', 'Version',
275 'Distribution', 'Maintainer', 'Description', 'Changes', 'Files')
278 for keyword in must_keywords:
279 if not changes.has_key(keyword.lower()):
280 missingfields.append(keyword)
282 if len(missingfields):
        raise ParseChangesError("Missing mandatory field(s) in changes file (policy 5.5): %s" % (missingfields))

    return changes
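
# Minimal usage sketch (illustrative; the filename is hypothetical):
#   changes = parse_changes("foo_1.0-1_amd64.changes",
#                           keyrings=get_active_keyring_paths())
#   print changes["source"], changes["version"]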
287 ################################################################################
289 def hash_key(hashname):
290 return '%ssum' % hashname
292 ################################################################################
294 def create_hash(where, files, hashname, hashfunc):
    create_hash extends the passed files dict with the given hash by
    iterating over all files on disk and passing them to the hashing
    function given as an argument.
    rejmsg = []
    for f in files.keys():
304 file_handle = open_file(f)
305 except CantOpenError:
            rejmsg.append("Could not open file %s for checksumming" % (f))
            continue
        files[f][hash_key(hashname)] = hashfunc(file_handle)
        file_handle.close()

    return rejmsg
314 ################################################################################
316 def check_hash(where, files, hashname, hashfunc):
318 check_hash checks the given hash in the files dict against the actual
319 files on disk. The hash values need to be present consistently in
320 all file entries. It does not modify its input in any way.
    rejmsg = []
    for f in files.keys():
        file_handle = None
        try:
            file_handle = open_file(f)
330 # Check for the hash entry, to not trigger a KeyError.
            if not files[f].has_key(hash_key(hashname)):
                rejmsg.append("%s: missing %s checksum in %s" % (f, hashname, where))
                continue
336 # Actually check the hash for correctness.
337 if hashfunc(file_handle) != files[f][hash_key(hashname)]:
                rejmsg.append("%s: %s check failed in %s" % (f, hashname, where))
340 except CantOpenError:
341 # TODO: This happens when the file is in the pool.
            # warn("Cannot open file %s" % f)
            pass
        finally:
            if file_handle:
                file_handle.close()

    return rejmsg
349 ################################################################################
351 def check_size(where, files):
    check_size checks the file sizes in the passed files dict against the
    sizes of the files on disk.
    rejmsg = []
    for f in files.keys():
        try:
            entry = os.stat(f)
        except OSError as exc:
            if exc.errno == errno.ENOENT:
                # TODO: This happens when the file is in the pool.
                continue
            raise

        actual_size = entry[stat.ST_SIZE]
368 size = int(files[f]["size"])
369 if size != actual_size:
            rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
                          % (f, actual_size, size, where))

    return rejmsg
374 ################################################################################
376 def check_dsc_files(dsc_filename, dsc, dsc_files):
378 Verify that the files listed in the Files field of the .dsc are
379 those expected given the announced Format.
381 @type dsc_filename: string
382 @param dsc_filename: path of .dsc file
385 @param dsc: the content of the .dsc parsed by C{parse_changes()}
387 @type dsc_files: dict
388 @param dsc_files: the file list returned by C{build_file_list()}
391 @return: all errors detected
395 # Ensure .dsc lists proper set of source files according to the format
    rejmsg = []
    has = defaultdict(lambda: 0)

    ftype_lookup = (
400 (r'orig.tar.gz', ('orig_tar_gz', 'orig_tar')),
401 (r'diff.gz', ('debian_diff',)),
402 (r'tar.gz', ('native_tar_gz', 'native_tar')),
403 (r'debian\.tar\.(gz|bz2|xz)', ('debian_tar',)),
404 (r'orig\.tar\.(gz|bz2|xz)', ('orig_tar',)),
405 (r'tar\.(gz|bz2|xz)', ('native_tar',)),
        (r'orig-.+\.tar\.(gz|bz2|xz)', ('more_orig_tar',)),
    )
    for f in dsc_files.keys():
        m = re_issource.match(f)
        if not m:
            rejmsg.append("%s: %s in Files field not recognised as source."
                          % (dsc_filename, f))
            continue
416 # Populate 'has' dictionary by resolving keys in lookup table
418 for regex, keys in ftype_lookup:
            if re.match(regex, m.group(3)):
                for key in keys:
                    has[key] += 1
                break
        else:
            # File does not match anything in lookup table; reject
            rejmsg.append("%s: unexpected source file '%s'" % (dsc_filename, f))
429 # Check for multiple files
430 for file_type in ('orig_tar', 'native_tar', 'debian_tar', 'debian_diff'):
431 if has[file_type] > 1:
432 rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))
434 # Source format specific tests
    try:
        format = get_format_from_string(dsc['format'])
        rejmsg.extend([
            '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)
        ])
    except UnknownFormatError:
        # Not an error here for now
        pass

    return rejmsg
447 ################################################################################
449 def check_hash_fields(what, manifest):
451 check_hash_fields ensures that there are no checksum fields in the
452 given dict that we do not know about.
    rejmsg = []
    hashes = map(lambda x: x[0], known_hashes)
457 for field in manifest:
458 if field.startswith("checksums-"):
459 hashname = field.split("-",1)[1]
460 if hashname not in hashes:
                rejmsg.append("Unsupported checksum field for %s "\
                              "in %s" % (hashname, what))

    return rejmsg
465 ################################################################################
467 def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
468 if format >= version:
        # The version should contain the specified hash.
        func = check_hash
        # Import hashes from the changes
        rejmsg = parse_checksums(".changes", files, changes, hashname)
        if len(rejmsg) > 0:
            return rejmsg
    else:
        # We need to calculate the hash because it can't possibly
        # be in the file.
        func = create_hash
    return func(".changes", files, hashname, hashfunc)
482 # We could add the orig which might be in the pool to the files dict to
483 # access the checksums easily.
485 def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
    _ensure_dsc_hash's task is to ensure that each and every *present* hash
    in the dsc is correct, i.e. identical to the changes file and if necessary
    the pool. The latter task is delegated to check_hash.
    rejmsg = []
    if not dsc.has_key('Checksums-%s' % (hashname,)):
        return rejmsg
495 # Import hashes from the dsc
496 parse_checksums(".dsc", dsc_files, dsc, hashname)
    rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
    return rejmsg
501 ################################################################################
503 def parse_checksums(where, files, manifest, hashname):
    rejmsg = []
    field = 'checksums-%s' % hashname
    if field not in manifest:
        return rejmsg
    for line in manifest[field].split('\n'):
        if not line:
            break
        clist = line.strip().split(' ')
        if len(clist) == 3:
            checksum, size, checkfile = clist
        else:
            rejmsg.append("Cannot parse checksum line [%s]" % (line))
            continue
517 if not files.has_key(checkfile):
518 # TODO: check for the file's entry in the original files dict, not
519 # the one modified by (auto)byhand and other weird stuff
520 # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
            #                (file, hashname, where))
            continue
        if files[checkfile]["size"] != size:
524 rejmsg.append("%s: size differs for files and checksums-%s entry "\
525 "in %s" % (checkfile, hashname, where))
527 files[checkfile][hash_key(hashname)] = checksum
528 for f in files.keys():
529 if not files[f].has_key(hash_key(hashname)):
            rejmsg.append("%s: no entry in checksums-%s in %s" % (f, hashname, where))

    return rejmsg
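
# A checksums field carries one "<checksum> <size> <filename>" triple per
# line, e.g. (illustrative):
#   Checksums-Sha256:
#    01ba4719c80b6fe911b091a7c05124b64eeece964e09c058ef8f9805daca546b 1234 foo_1.0-1.dsc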
533 ################################################################################
535 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
537 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
    files = {}

    # Make sure we have a Files: field to parse...
541 if not changes.has_key(field):
542 raise NoFilesFieldError
544 # Validate .changes Format: field
546 validate_changes_format(parse_format(changes['format']), field)
548 includes_section = (not is_a_dsc) and field == "files"
550 # Parse each entry/line:
    for i in changes[field].split('\n'):
        if not i:
            break
        s = i.split()
        section = priority = ""
        try:
            if includes_section:
                (md5, size, section, priority, name) = s
            else:
                (md5, size, name) = s
        except ValueError:
            raise ParseChangesError(i)
569 (section, component) = extract_component_from_section(section)
571 files[name] = dict(size=size, section=section,
572 priority=priority, component=component)
        files[name][hashname] = md5

    return files
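
# Entry layout (illustrative): a .changes "Files" line reads
#   <md5> <size> <section> <priority> <name>
# e.g. "d41d8cd98f00b204e9800998ecf8427e 0 utils optional foo_1.0-1.dsc",
# while a .dsc entry carries only "<md5> <size> <name>".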
577 ################################################################################
579 # see http://bugs.debian.org/619131
580 def build_package_list(dsc, session = None):
    if not dsc.has_key("package-list"):
        return {}

    packages = {}
586 for line in dsc["package-list"].split("\n"):
        fields = line.split()
        name = fields[0]
        package_type = fields[1]
        (section, component) = extract_component_from_section(fields[2])
        priority = fields[3]
596 # Validate type if we have a session
597 if session and get_override_type(package_type, session) is None:
598 # Maybe just warn and ignore? exit(1) might be a bit hard...
            fubar("invalid type (%s) in Package-List." % (package_type))
601 if name not in packages or packages[name]["type"] == "dsc":
            packages[name] = dict(priority=priority, section=section, type=package_type, component=component, files=[])

    return packages
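
# A Package-List line reads "<name> <type> <section> <priority>", e.g.
# (illustrative):
#   Package-List:
#    foo deb utils optional
#    foo-doc deb doc optional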
606 ################################################################################
608 def send_mail (message, filename="", whitelists=None):
609 """sendmail wrapper, takes _either_ a message string or a file as arguments
611 @type whitelists: list of (str or None)
612 @param whitelists: path to whitelists. C{None} or an empty list whitelists
613 everything, otherwise an address is whitelisted if it is
614 included in any of the lists.
615 In addition a global whitelist can be specified in
616 Dinstall::MailWhiteList.
619 maildir = Cnf.get('Dir::Mail')
621 path = os.path.join(maildir, datetime.datetime.now().isoformat())
622 path = find_next_free(path)
627 # Check whether we're supposed to be sending mail
628 if Cnf.has_key("Dinstall::Options::No-Mail") and Cnf["Dinstall::Options::No-Mail"]:
631 # If we've been passed a string dump it into a temporary file
633 (fd, filename) = tempfile.mkstemp()
634 os.write (fd, message)
    if whitelists is None or None in whitelists:
        whitelists = []
639 if Cnf.get('Dinstall::MailWhiteList', ''):
640 whitelists.append(Cnf['Dinstall::MailWhiteList'])
641 if len(whitelists) != 0:
642 message_in = open_file(filename)
643 message_raw = modemail.message_from_file(message_in)
        whitelist = []
        for path in whitelists:
648 with open_file(path, 'r') as whitelist_in:
649 for line in whitelist_in:
650 if not re_whitespace_comment.match(line):
651 if re_re_mark.match(line):
652 whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
654 whitelist.append(re.compile(re.escape(line.strip())))
657 fields = ["To", "Bcc", "Cc"]
660 value = message_raw.get(field, None)
663 for item in value.split(","):
664 (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
670 if not mail_whitelisted:
671 print "Skipping {0} since it's not whitelisted".format(item)
675 # Doesn't have any mail in whitelist so remove the header
677 del message_raw[field]
679 message_raw.replace_header(field, ', '.join(match))
681 # Change message fields in order if we don't have a To header
682 if not message_raw.has_key("To"):
685 if message_raw.has_key(field):
686 message_raw[fields[-1]] = message_raw[field]
687 del message_raw[field]
690 # Clean up any temporary files
691 # and return, as we removed all recipients.
                os.unlink (filename)
    fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0o700)
    os.write (fd, message_raw.as_string(True))
701 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
703 raise SendmailFailedError(output)
705 # Clean up any temporary files
709 ################################################################################
711 def poolify (source, component=None):
712 if source[:3] == "lib":
713 return source[:4] + '/' + source + '/'
715 return source[:1] + '/' + source + '/'
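
# Examples (illustrative):
#   poolify("libfoo") == "libf/libfoo/"
#   poolify("bar")    == "b/bar/"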
717 ################################################################################
719 def move (src, dest, overwrite = 0, perms = 0o664):
720 if os.path.exists(dest) and os.path.isdir(dest):
723 dest_dir = os.path.dirname(dest)
724 if not os.path.lexists(dest_dir):
        umask = os.umask(0o000)
        os.makedirs(dest_dir, 0o2775)
        os.umask(umask)
728 #print "Moving %s to %s..." % (src, dest)
729 if os.path.exists(dest) and os.path.isdir(dest):
730 dest += '/' + os.path.basename(src)
731 # Don't overwrite unless forced to
732 if os.path.lexists(dest):
734 fubar("Can't move %s to %s - file already exists." % (src, dest))
736 if not os.access(dest, os.W_OK):
737 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
738 shutil.copy2(src, dest)
    os.chmod(dest, perms)
    os.unlink(src)
742 def copy (src, dest, overwrite = 0, perms = 0o664):
743 if os.path.exists(dest) and os.path.isdir(dest):
746 dest_dir = os.path.dirname(dest)
747 if not os.path.exists(dest_dir):
        umask = os.umask(0o000)
        os.makedirs(dest_dir, 0o2775)
        os.umask(umask)
751 #print "Copying %s to %s..." % (src, dest)
752 if os.path.exists(dest) and os.path.isdir(dest):
753 dest += '/' + os.path.basename(src)
754 # Don't overwrite unless forced to
755 if os.path.lexists(dest):
757 raise FileExistsError
759 if not os.access(dest, os.W_OK):
760 raise CantOverwriteError
761 shutil.copy2(src, dest)
762 os.chmod(dest, perms)
764 ################################################################################
766 def which_conf_file ():
767 if os.getenv('DAK_CONFIG'):
768 return os.getenv('DAK_CONFIG')
770 res = socket.getfqdn()
771 # In case we allow local config files per user, try if one exists
772 if Cnf.find_b("Config::" + res + "::AllowLocalConfig"):
773 homedir = os.getenv("HOME")
            confpath = os.path.join(homedir, "etc/dak.conf")
775 if os.path.exists(confpath):
776 apt_pkg.read_config_file_isc(Cnf,confpath)
778 # We are still in here, so there is no local config file or we do
779 # not allow local files. Do the normal stuff.
780 if Cnf.get("Config::" + res + "::DakConfig"):
781 return Cnf["Config::" + res + "::DakConfig"]
783 return default_config
785 ################################################################################
787 def TemplateSubst(subst_map, filename):
    """ Perform a substitution of template """
789 templatefile = open_file(filename)
790 template = templatefile.read()
791 for k, v in subst_map.iteritems():
        template = template.replace(k, str(v))
    templatefile.close()
    return template
796 ################################################################################
798 def fubar(msg, exit_code=1):
    sys.stderr.write("E: %s\n" % (msg))
    sys.exit(exit_code)

def warn(msg):
    sys.stderr.write("W: %s\n" % (msg))
805 ################################################################################
# Returns the user name with a laughable attempt at rfc822 conformance
# (read: removing stray periods).
def whoami ():
    return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')

def getusername ():
    return pwd.getpwuid(os.getuid())[0]
815 ################################################################################
def size_type (c):
    """ Make a byte count human-readable (B/KB/MB) """
    t = " B"
    if c > 10240:
        c = c / 1024
        t = " KB"
    if c > 10240:
        c = c / 1024
        t = " MB"
    return ("%d%s" % (c, t))
827 ################################################################################
829 def cc_fix_changes (changes):
    o = changes.get("architecture", "")
    if o:
        del changes["architecture"]
    changes["architecture"] = {}
    for j in o.split():
        changes["architecture"][j] = 1
837 def changes_compare (a, b):
838 """ Sort by source name, source version, 'have source', and then by filename """
    try:
        a_changes = parse_changes(a)
    except:
        return -1

    try:
        b_changes = parse_changes(b)
    except:
        return 1
849 cc_fix_changes (a_changes)
850 cc_fix_changes (b_changes)
852 # Sort by source name
853 a_source = a_changes.get("source")
854 b_source = b_changes.get("source")
    q = cmp (a_source, b_source)
    if q:
        return q
859 # Sort by source version
860 a_version = a_changes.get("version", "0")
861 b_version = b_changes.get("version", "0")
    q = apt_pkg.version_compare(a_version, b_version)
    if q:
        return q
866 # Sort by 'have source'
867 a_has_source = a_changes["architecture"].get("source")
868 b_has_source = b_changes["architecture"].get("source")
    if a_has_source and not b_has_source:
        return -1
    elif b_has_source and not a_has_source:
        return 1
877 ################################################################################
879 def find_next_free (dest, too_many=100):
    extra = 0
    orig_dest = dest
    while os.path.lexists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
        extra += 1
885 if extra >= too_many:
        raise NoFreeFilenameError
    return dest
889 ################################################################################
891 def result_join (original, sep = '\t'):
    resultlist = []
    for i in xrange(len(original)):
        if original[i] is None:
895 resultlist.append("")
897 resultlist.append(original[i])
898 return sep.join(resultlist)
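
# Example (illustrative):
#   result_join(["a", None, "c"]) == "a\t\tc"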
900 ################################################################################
902 def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    out = ""
    for line in str.split('\n'):
906 if line or include_blank_lines:
907 out += "%s%s\n" % (prefix, line)
    # Strip trailing new line
    if out:
        out = out[:-1]
    return out
913 ################################################################################
915 def validate_changes_file_arg(filename, require_changes=1):
917 'filename' is either a .changes or .dak file. If 'filename' is a
918 .dak file, it's changed to be the corresponding .changes file. The
919 function then checks if the .changes file a) exists and b) is
920 readable and returns the .changes filename if so. If there's a
921 problem, the next action depends on the option 'require_changes'
924 - If 'require_changes' == -1, errors are ignored and the .changes
925 filename is returned.
926 - If 'require_changes' == 0, a warning is given and 'None' is returned.
927 - If 'require_changes' == 1, a fatal error is raised.
932 orig_filename = filename
933 if filename.endswith(".dak"):
934 filename = filename[:-4]+".changes"
936 if not filename.endswith(".changes"):
937 error = "invalid file type; not a changes file"
939 if not os.access(filename,os.R_OK):
940 if os.path.exists(filename):
941 error = "permission denied"
943 error = "file not found"
946 if require_changes == 1:
947 fubar("%s: %s." % (orig_filename, error))
948 elif require_changes == 0:
949 warn("Skipping %s - %s" % (orig_filename, error))
951 else: # We only care about the .dak file
956 ################################################################################
def real_arch(arch):
    return (arch != "source" and arch != "all")
961 ################################################################################
963 def join_with_commas_and(list):
964 if len(list) == 0: return "nothing"
965 if len(list) == 1: return list[0]
966 return ", ".join(list[:-1]) + " and " + list[-1]
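
# Examples (illustrative):
#   join_with_commas_and([])              == "nothing"
#   join_with_commas_and(["a"])           == "a"
#   join_with_commas_and(["a", "b", "c"]) == "a, b and c"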
968 ################################################################################
def pp_deps (deps):
    pp_deps = []
    for atom in deps:
        (pkg, version, constraint) = atom
        if constraint:
            pp_dep = "%s (%s %s)" % (pkg, constraint, version)
        else:
            pp_dep = pkg
978 pp_deps.append(pp_dep)
979 return " |".join(pp_deps)
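
# Example (illustrative): the atom ("foo", "1.0", ">=") is rendered as
# "foo (>= 1.0)"; an atom with an empty constraint renders as just "foo".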
981 ################################################################################
986 ################################################################################
988 def parse_args(Options):
989 """ Handle -a, -c and -s arguments; returns them as SQL constraints """
990 # XXX: This should go away and everything which calls it be converted
991 # to use SQLA properly. For now, we'll just fix it not to use
992 # the old Pg interface though
993 session = DBConn().session()
    # Process suite
    if Options["Suite"]:
        suite_ids_list = []
        for suitename in split_args(Options["Suite"]):
998 suite = get_suite(suitename, session=session)
999 if not suite or suite.suite_id is None:
1000 warn("suite '%s' not recognised." % (suite and suite.suite_name or suitename))
1002 suite_ids_list.append(suite.suite_id)
        if suite_ids_list:
            con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
        else:
            fubar("No valid suite given.")
    else:
        con_suites = ""
1011 if Options["Component"]:
1012 component_ids_list = []
1013 for componentname in split_args(Options["Component"]):
1014 component = get_component(componentname, session=session)
1015 if component is None:
1016 warn("component '%s' not recognised." % (componentname))
1018 component_ids_list.append(component.component_id)
1019 if component_ids_list:
1020 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
        else:
            fubar("No valid component given.")
    else:
        con_components = ""
1026 # Process architecture
1027 con_architectures = ""
    check_source = 0
    if Options["Architecture"]:
        arch_ids_list = []
        for archname in split_args(Options["Architecture"]):
1032 if archname == "source":
1035 arch = get_architecture(archname, session=session)
1037 warn("architecture '%s' not recognised." % (archname))
1039 arch_ids_list.append(arch.arch_id)
1041 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
1043 if not check_source:
1044 fubar("No valid architecture given.")
1048 return (con_suites, con_architectures, con_components, check_source)
1050 ################################################################################
1052 def arch_compare_sw (a, b):
1054 Function for use in sorting lists of architectures.
1056 Sorts normally except that 'source' dominates all others.
    if a == "source" and b == "source":
        return 0
    elif a == "source":
        return -1
    elif b == "source":
        return 1

    return cmp (a, b)
1068 ################################################################################
1070 def split_args (s, dwim=1):
1072 Split command line arguments which can be separated by either commas
1073 or whitespace. If dwim is set, it will complain about string ending
1074 in comma since this usually means someone did 'dak ls -a i386, m68k
1075 foo' or something and the inevitable confusion resulting from 'm68k'
1076 being treated as an argument is undesirable.
    if s.find(",") == -1:
        return s.split()
    else:
1082 if s[-1:] == "," and dwim:
            fubar("split_args: found trailing comma, spurious space maybe?")
        return s.split(",")
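
# Examples (illustrative):
#   split_args("i386 amd64") == ["i386", "amd64"]
#   split_args("i386,amd64") == ["i386", "amd64"]
# Commas do not eat whitespace: "i386, amd64" yields " amd64" as its second
# element, and a trailing comma trips the dwim check above.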
1086 ################################################################################
1088 def gpgv_get_status_output(cmd, status_read, status_write):
    Our very own version of commands.getstatusoutput(), hacked to support
1094 cmd = ['/bin/sh', '-c', cmd]
1095 p2cread, p2cwrite = os.pipe()
1096 c2pread, c2pwrite = os.pipe()
1097 errout, errin = os.pipe()
1107 for i in range(3, 256):
1108 if i != status_write:
1114 os.execvp(cmd[0], cmd)
1120 os.dup2(c2pread, c2pwrite)
1121 os.dup2(errout, errin)
1123 output = status = ""
1125 i, o, e = select.select([c2pwrite, errin, status_read], [], [])
1128 r = os.read(fd, 8196)
1130 more_data.append(fd)
1131 if fd == c2pwrite or fd == errin:
1133 elif fd == status_read:
1136 fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
1138 pid, exit_status = os.waitpid(pid, 0)
1140 os.close(status_write)
1141 os.close(status_read)
1151 return output, status, exit_status
1153 ################################################################################
1155 def process_gpgv_output(status):
    # Process the status-fd output
    keywords = {}
    internal_error = ""
    for line in status.split('\n'):
        line = line.strip()
        if line == "":
            continue
        split = line.split()
        if len(split) < 2:
            internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
            continue
1167 (gnupg, keyword) = split[:2]
1168 if gnupg != "[GNUPG:]":
1169 internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
1172 if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
1173 internal_error += "found duplicate status token ('%s').\n" % (keyword)
        args = split[2:]
        keywords[keyword] = args
1178 return (keywords, internal_error)
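
# A typical status-fd line looks like (illustrative):
#   [GNUPG:] GOODSIG 0123456789ABCDEF Uploader <uploader@example.org>
# which ends up as keywords["GOODSIG"] == ["0123456789ABCDEF", "Uploader",
# "<uploader@example.org>"] with internal_error left empty.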
1180 ################################################################################
1182 def retrieve_key (filename, keyserver=None, keyring=None):
1184 Retrieve the key that signed 'filename' from 'keyserver' and
1185 add it to 'keyring'. Returns nothing on success, or an error message
1189 # Defaults for keyserver and keyring
    if not keyserver:
        keyserver = Cnf["Dinstall::KeyServer"]
    if not keyring:
        keyring = get_primary_keyring_path()
1195 # Ensure the filename contains no shell meta-characters or other badness
1196 if not re_taint_free.match(filename):
1197 return "%s: tainted filename" % (filename)
1199 # Invoke gpgv on the file
1200 status_read, status_write = os.pipe()
1201 cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1202 (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1204 # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)
    if internal_error:
        return internal_error
1209 if not keywords.has_key("NO_PUBKEY"):
1210 return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1212 fingerprint = keywords["NO_PUBKEY"][0]
1213 # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
1214 # it'll try to create a lockfile in /dev. A better solution might
1215 # be a tempfile or something.
1216 cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1217 % (Cnf["Dinstall::SigningKeyring"])
1218 cmd += " --keyring %s --keyserver %s --recv-key %s" \
1219 % (keyring, keyserver, fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    if (result != 0):
        return "'%s' failed with exit code %s" % (cmd, result)

    return ""
1226 ################################################################################
1228 def gpg_keyring_args(keyrings=None):
    if not keyrings:
        keyrings = get_active_keyring_paths()
1232 return " ".join(["--keyring %s" % x for x in keyrings])
1234 ################################################################################
1236 def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None, session=None):
    Check the signature of a file and return the fingerprint if the
    signature is valid, or 'None' if it's not. The first argument is the
    filename whose signature should be checked. The second argument is
    the name of the file any detached signature applies to. 'keyrings'
    is an optional *list* of keyrings to use. 'autofetch' can be None,
    True or False; if None, the default behaviour specified in the config
    will be used. Any problems found are returned alongside the
    fingerprint as a list of reject messages.

    rejects = []
1254 # Ensure the filename contains no shell meta-characters or other badness
1255 if not re_taint_free.match(sig_filename):
1256 rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
1257 return (None, rejects)
1259 if data_filename and not re_taint_free.match(data_filename):
1260 rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
1261 return (None, rejects)
    if not keyrings:
        keyrings = [ x.keyring_name for x in session.query(Keyring).filter(Keyring.active == True).all() ]
1266 # Autofetch the signing key if that's enabled
    if autofetch is None:
1268 autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    if autofetch:
        error_msg = retrieve_key(sig_filename)
        if error_msg:
            rejects.append(error_msg)
            return (None, rejects)
1275 # Build the command line
1276 status_read, status_write = os.pipe()
1277 cmd = "gpgv --status-fd %s %s %s %s" % (
1278 status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)
1280 # Invoke gpgv on the file
1281 (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)
1283 # Process the status-fd output
1284 (keywords, internal_error) = process_gpgv_output(status)
    # If we failed to parse the status-fd output, let's just whine and bail now
    if internal_error:
        rejects.append("internal error while performing signature check on %s." % (sig_filename))
        rejects.append(internal_error)
        rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.")
1291 return (None, rejects)
1293 # Now check for obviously bad things in the processed output
1294 if keywords.has_key("KEYREVOKED"):
1295 rejects.append("The key used to sign %s has been revoked." % (sig_filename))
1296 if keywords.has_key("BADSIG"):
1297 rejects.append("bad signature on %s." % (sig_filename))
1298 if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
1299 rejects.append("failed to check signature on %s." % (sig_filename))
1300 if keywords.has_key("NO_PUBKEY"):
        args = keywords["NO_PUBKEY"]
        if len(args) >= 1:
            key = args[0]
        rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
1305 if keywords.has_key("BADARMOR"):
1306 rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
1307 if keywords.has_key("NODATA"):
1308 rejects.append("no signature found in %s." % (sig_filename))
1309 if keywords.has_key("EXPKEYSIG"):
        args = keywords["EXPKEYSIG"]
        if len(args) >= 1:
            key = args[0]
        rejects.append("Signature made by expired key 0x%s" % (key))
1314 if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
        args = keywords["KEYEXPIRED"]
        expiredate = ""
        if len(args) >= 1:
            timestamp = args[0]
            if timestamp.count("T") == 0:
                try:
                    expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
                except ValueError:
                    expiredate = "unknown (%s)" % (timestamp)
            else:
                expiredate = timestamp
1326 rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))
1328 if len(rejects) > 0:
1329 return (None, rejects)
    # Next check gpgv exited with a zero return code
    if exit_status:
        rejects.append("gpgv failed while checking %s." % (sig_filename))
        if status.strip():
            rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "))
        else:
            rejects.append(prefix_multi_line_string(output, " [GPG output:] "))
        return (None, rejects)
1340 # Sanity check the good stuff we expect
1341 if not keywords.has_key("VALIDSIG"):
1342 rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
    else:
        args = keywords["VALIDSIG"]
        if len(args) < 1:
            rejects.append("internal error while checking signature on %s." % (sig_filename))
        else:
            fingerprint = args[0]
1349 if not keywords.has_key("GOODSIG"):
1350 rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
1351 if not keywords.has_key("SIG_ID"):
1352 rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))
1354 # Finally ensure there's not something we don't recognise
1355 known_keywords = dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
1356 SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
1357 NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="",POLICY_URL="")
1359 for keyword in keywords.keys():
1360 if not known_keywords.has_key(keyword):
1361 rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
1363 if len(rejects) > 0:
1364 return (None, rejects)
1366 return (fingerprint, [])
1368 ################################################################################
1370 def gpg_get_key_addresses(fingerprint):
    """retrieve email addresses from gpg key uids for a given fingerprint"""
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses is not None:
        return addresses
    addresses = []
1376 cmd = "gpg --no-default-keyring %s --fingerprint %s" \
1377 % (gpg_keyring_args(), fingerprint)
1378 (result, output) = commands.getstatusoutput(cmd)
1380 for l in output.split('\n'):
1381 m = re_gpg_uid.match(l)
1384 address = m.group(1)
1385 if address.endswith('@debian.org'):
1386 # prefer @debian.org addresses
1387 # TODO: maybe not hardcode the domain
1388 addresses.insert(0, address)
1390 addresses.append(m.group(1))
    key_uid_email_cache[fingerprint] = addresses
    return addresses
1394 ################################################################################
1396 def get_logins_from_ldap(fingerprint='*'):
    """retrieve logins from LDAP linked to a given fingerprint"""
1399 LDAPDn = Cnf['Import-LDAP-Fingerprints::LDAPDn']
1400 LDAPServer = Cnf['Import-LDAP-Fingerprints::LDAPServer']
1401 l = ldap.open(LDAPServer)
1402 l.simple_bind_s('','')
1403 Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
1404 '(keyfingerprint=%s)' % fingerprint,
1405 ['uid', 'keyfingerprint'])
    login = {}
    for elem in Attrs:
        login[elem[1]['keyFingerPrint'][0]] = elem[1]['uid'][0]
    return login
1411 ################################################################################
1413 def get_users_from_ldap():
1414 """retrieve login and user names from LDAP"""
1416 LDAPDn = Cnf['Import-LDAP-Fingerprints::LDAPDn']
1417 LDAPServer = Cnf['Import-LDAP-Fingerprints::LDAPServer']
1418 l = ldap.open(LDAPServer)
1419 l.simple_bind_s('','')
1420 Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
1421 '(uid=*)', ['uid', 'cn', 'mn', 'sn'])
    users = {}
    for elem in Attrs:
        elem = elem[1]
        name = []
        for k in ('cn', 'mn', 'sn'):
            if k in elem and elem[k][0] != '-':
                name.append(elem[k][0])
        users[' '.join(name)] = elem['uid'][0]
    return users
1435 ################################################################################
1437 def clean_symlink (src, dest, root):
1439 Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
1442 src = src.replace(root, '', 1)
1443 dest = dest.replace(root, '', 1)
1444 dest = os.path.dirname(dest)
1445 new_src = '../' * len(dest.split('/'))
1446 return new_src + src
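
# Example (illustrative): with root "/srv/ftp/",
#   clean_symlink("/srv/ftp/pool/main/f/foo/foo_1.0.orig.tar.gz",
#                 "/srv/ftp/dists/sid/main/foo.tar.gz", "/srv/ftp/")
# returns "../../../pool/main/f/foo/foo_1.0.orig.tar.gz".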
1448 ################################################################################
1450 def temp_filename(directory=None, prefix="dak", suffix="", mode=None, group=None):
1452 Return a secure and unique filename by pre-creating it.
1454 @type directory: str
1455 @param directory: If non-null it will be the directory the file is pre-created in.
1458 @param prefix: The filename will be prefixed with this string
1461 @param suffix: The filename will end with this string
1464 @param mode: If set the file will get chmodded to those permissions
1467 @param group: If set the file will get chgrped to the specified group.
1470 @return: Returns a pair (fd, name)
    (tfd, tfname) = tempfile.mkstemp(suffix, prefix, directory)
    if mode:
        os.chmod(tfname, mode)
    if group:
        gid = grp.getgrnam(group).gr_gid
        os.chown(tfname, -1, gid)
    return (tfd, tfname)
1481 ################################################################################
1483 def temp_dirname(parent=None, prefix="dak", suffix="", mode=None, group=None):
1485 Return a secure and unique directory by pre-creating it.
1488 @param parent: If non-null it will be the directory the directory is pre-created in.
1491 @param prefix: The filename will be prefixed with this string
1494 @param suffix: The filename will end with this string
1497 @param mode: If set the file will get chmodded to those permissions
1500 @param group: If set the file will get chgrped to the specified group.
    @return: Returns the name of the new directory
    tfname = tempfile.mkdtemp(suffix, prefix, parent)
    if mode:
        os.chmod(tfname, mode)
    if group:
        gid = grp.getgrnam(group).gr_gid
        os.chown(tfname, -1, gid)
    return tfname
1515 ################################################################################
1517 def is_email_alias(email):
1518 """ checks if the user part of the email is listed in the alias file """
    global alias_cache
    if alias_cache is None:
        aliasfn = which_alias_file()
        alias_cache = set()
        if aliasfn:
            for l in open(aliasfn):
                alias_cache.add(l.split(':')[0])
1526 uid = email.split('@')[0]
1527 return uid in alias_cache
1529 ################################################################################
1531 def get_changes_files(from_dir):
1533 Takes a directory and lists all .changes files in it (as well as chdir'ing
1534 to the directory; this is due to broken behaviour on the part of p-u/p-a
1535 when you're not in the right place)
1537 Returns a list of filenames
    try:
        # Much of the rest of p-u/p-a depends on being in the right place
        os.chdir(from_dir)
        changes_files = [x for x in os.listdir(from_dir) if x.endswith('.changes')]
1543 except OSError as e:
1544 fubar("Failed to read list from directory %s (%s)" % (from_dir, e))
1546 return changes_files
1548 ################################################################################
1550 Cnf = config.Config().Cnf
1552 ################################################################################
1554 def parse_wnpp_bug_file(file = "/srv/ftp-master.debian.org/scripts/masterfiles/wnpp_rm"):
1556 Parses the wnpp bug list available at http://qa.debian.org/data/bts/wnpp_rm
    Well, actually it parses a local copy, but let's document the source
1560 returns a dict associating source package name with a list of open wnpp
1561 bugs (Yes, there might be more than one)
    wnpp = {}
    try:
        f = open(file)
        lines = f.readlines()
1568 except IOError as e:
        print "Warning: Couldn't open %s; don't know about WNPP bugs, so won't close any." % file
        return wnpp
    for line in lines:
        split_line = line.split(": ", 1)
        if len(split_line) > 1:
            wnpp[split_line[0]] = split_line[1].split("|")
    for source in wnpp.keys():
        bugs = []
        for wnpp_bug in wnpp[source]:
            bug_no = re.search(r"(\d)+", wnpp_bug).group()
            if bug_no:
                bugs.append(bug_no)
        wnpp[source] = bugs

    return wnpp
1587 ################################################################################
1589 def get_packages_from_ftp(root, suite, component, architecture):
1591 Returns an object containing apt_pkg-parseable data collected by
1592 aggregating Packages.gz files gathered for each architecture.
1595 @param root: path to ftp archive root directory
1598 @param suite: suite to extract files from
1600 @type component: string
1601 @param component: component to extract files from
1603 @type architecture: string
1604 @param architecture: architecture to extract files from
1607 @return: apt_pkg class containing package data
1609 filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (root, suite, component, architecture)
1610 (fd, temp_file) = temp_filename()
    (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_file))
    if (result != 0):
        fubar("Gunzip invocation failed!\n%s\n" % (output), result)
1614 filename = "%s/dists/%s/%s/debian-installer/binary-%s/Packages.gz" % (root, suite, component, architecture)
1615 if os.path.exists(filename):
        (result, output) = commands.getstatusoutput("gunzip -c %s >> %s" % (filename, temp_file))
        if (result != 0):
            fubar("Gunzip invocation failed!\n%s\n" % (output), result)
1619 packages = open_file(temp_file)
1620 Packages = apt_pkg.TagFile(packages)
    os.unlink(temp_file)
    return Packages
1624 ################################################################################
1626 def deb_extract_control(fh):
1627 """extract DEBIAN/control from a binary package"""
1628 return apt_inst.DebFile(fh).control.extractdata("control")
1630 ################################################################################
1632 def mail_addresses_for_upload(maintainer, changed_by, fingerprint):
1633 """mail addresses to contact for an upload
1635 @type maintainer: str
1636 @param maintainer: Maintainer field of the .changes file
1638 @type changed_by: str
1639 @param changed_by: Changed-By field of the .changes file
1641 @type fingerprint: str
1642 @param fingerprint: fingerprint of the key used to sign the upload
    @return: list of RFC 2047-encoded mail addresses to contact regarding
             the upload
1648 addresses = [maintainer]
1649 if changed_by != maintainer:
1650 addresses.append(changed_by)
1652 fpr_addresses = gpg_get_key_addresses(fingerprint)
1653 if len(fpr_addresses) > 0 and fix_maintainer(changed_by)[3] not in fpr_addresses and fix_maintainer(maintainer)[3] not in fpr_addresses:
1654 addresses.append(fpr_addresses[0])
1656 encoded_addresses = [ fix_maintainer(e)[1] for e in addresses ]
1657 return encoded_addresses
1659 ################################################################################
1661 def call_editor(text="", suffix=".txt"):
1662 """run editor and return the result as a string
1665 @param text: initial text
1668 @param suffix: extension for temporary file
1671 @return: string with the edited text
1673 editor = os.environ.get('VISUAL', os.environ.get('EDITOR', 'vi'))
1674 tmp = tempfile.NamedTemporaryFile(suffix=suffix, delete=False)
    try:
        tmp.write(text)
        tmp.close()
        daklib.daksubprocess.check_call([editor, tmp.name])
        return open(tmp.name, 'r').read()
    finally:
        os.unlink(tmp.name)
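
# Usage sketch (illustrative):
#   notes = call_editor("Initial text\n", suffix=".notes")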
1683 ################################################################################
1685 def check_reverse_depends(removals, suite, arches=None, session=None, cruft=False):
1686 dbsuite = get_suite(suite, session)
1687 overridesuite = dbsuite
1688 if dbsuite.overridesuite is not None:
1689 overridesuite = get_suite(dbsuite.overridesuite, session)
    all_broken = {}
    if arches:
        all_arches = set(arches)
    else:
        all_arches = set([x.arch_string for x in get_suite_architectures(suite)])
1697 all_arches -= set(["source", "all"])
1698 metakey_d = get_or_set_metadatakey("Depends", session)
1699 metakey_p = get_or_set_metadatakey("Provides", session)
    params = {
        'suite_id': dbsuite.suite_id,
1702 'metakey_d_id': metakey_d.key_id,
        'metakey_p_id': metakey_p.key_id,
    }
1705 for architecture in all_arches | set(['all']):
        deps = {}
        sources = {}
        p2c = {}
        virtual_packages = {}
1709 params['arch_id'] = get_architecture(architecture, session).arch_id
        statement = '''
        SELECT b.id, b.package, s.source, c.name as component,
1713 (SELECT bmd.value FROM binaries_metadata bmd WHERE bmd.bin_id = b.id AND bmd.key_id = :metakey_d_id) AS depends,
            (SELECT bmp.value FROM binaries_metadata bmp WHERE bmp.bin_id = b.id AND bmp.key_id = :metakey_p_id) AS provides
        FROM binaries b
        JOIN bin_associations ba ON b.id = ba.bin AND ba.suite = :suite_id
1717 JOIN source s ON b.source = s.id
1718 JOIN files_archive_map af ON b.file = af.file_id
1719 JOIN component c ON af.component_id = c.id
1720 WHERE b.architecture = :arch_id'''
1721 query = session.query('id', 'package', 'source', 'component', 'depends', 'provides'). \
1722 from_statement(statement).params(params)
1723 for binary_id, package, source, component, depends, provides in query:
1724 sources[package] = source
1725 p2c[package] = component
1726 if depends is not None:
1727 deps[package] = depends
1728 # Maintain a counter for each virtual package. If a
1729 # Provides: exists, set the counter to 0 and count all
1730 # provides by a package not in the list for removal.
1731 # If the counter stays 0 at the end, we know that only
            # the to-be-removed packages provided this virtual
            # package.
1734 if provides is not None:
1735 for virtual_pkg in provides.split(","):
1736 virtual_pkg = virtual_pkg.strip()
1737 if virtual_pkg == package: continue
1738 if not virtual_packages.has_key(virtual_pkg):
1739 virtual_packages[virtual_pkg] = 0
1740 if package not in removals:
1741 virtual_packages[virtual_pkg] += 1
1743 # If a virtual package is only provided by the to-be-removed
1744 # packages, treat the virtual package as to-be-removed too.
1745 for virtual_pkg in virtual_packages.keys():
1746 if virtual_packages[virtual_pkg] == 0:
1747 removals.append(virtual_pkg)
1749 # Check binary dependencies (Depends)
1750 for package in deps.keys():
1751 if package in removals: continue
        parsed_dep = []
        try:
            parsed_dep += apt_pkg.parse_depends(deps[package])
1755 except ValueError as e:
1756 print "Error for package %s: %s" % (package, e)
1757 for dep in parsed_dep:
1758 # Check for partial breakage. If a package has a ORed
1759 # dependency, there is only a dependency problem if all
1760 # packages in the ORed depends will be removed.
            unsat = 0
            for dep_package, _, _ in dep:
                if dep_package in removals:
                    unsat += 1
1765 if unsat == len(dep):
1766 component = p2c[package]
1767 source = sources[package]
1768 if component != "main":
1769 source = "%s/%s" % (source, component)
1770 all_broken.setdefault(source, {}).setdefault(package, set()).add(architecture)
    if all_broken:
        if cruft:
            print " - broken Depends:"
        else:
            print "# Broken Depends:"
1778 for source, bindict in sorted(all_broken.items()):
            lines = []
            for binary, arches in sorted(bindict.items()):
1781 if arches == all_arches or 'all' in arches:
1782 lines.append(binary)
1784 lines.append('%s [%s]' % (binary, ' '.join(sorted(arches))))
            if cruft:
                print ' %s: %s' % (source, lines[0])
            else:
                print '%s: %s' % (source, lines[0])
1789 for line in lines[1:]:
                if cruft:
                    print ' ' + ' ' * (len(source) + 2) + line
                else:
                    print ' ' * (len(source) + 2) + line
1797 # Check source dependencies (Build-Depends and Build-Depends-Indep)
    all_broken = {}
    metakey_bd = get_or_set_metadatakey("Build-Depends", session)
1800 metakey_bdi = get_or_set_metadatakey("Build-Depends-Indep", session)
    params = {
        'suite_id': dbsuite.suite_id,
        'metakey_ids': (metakey_bd.key_id, metakey_bdi.key_id),
    }
    statement = '''
    SELECT s.id, s.source, string_agg(sm.value, ', ') as build_dep
    FROM source s
    JOIN source_metadata sm ON s.id = sm.src_id
    WHERE s.id in
        (SELECT source FROM src_associations
            WHERE suite = :suite_id)
        AND sm.key_id in :metakey_ids
    GROUP BY s.id, s.source'''
    query = session.query('id', 'source', 'build_dep').from_statement(statement). \
        params(params)
1816 for source_id, source, build_dep in query:
        if source in removals: continue
        parsed_dep = []
1819 if build_dep is not None:
1820 # Remove [arch] information since we want to see breakage on all arches
1821 build_dep = re_build_dep_arch.sub("", build_dep)
            try:
                parsed_dep += apt_pkg.parse_depends(build_dep)
1824 except ValueError as e:
1825 print "Error for source %s: %s" % (source, e)
1826 for dep in parsed_dep:
            unsat = 0
            for dep_package, _, _ in dep:
                if dep_package in removals:
                    unsat += 1
1831 if unsat == len(dep):
1832 component, = session.query(Component.component_name) \
1833 .join(Component.overrides) \
1834 .filter(Override.suite == overridesuite) \
1835 .filter(Override.package == re.sub('/(contrib|non-free)$', '', source)) \
                    .join(Override.overridetype).filter(OverrideType.overridetype == 'dsc') \
                    .first()
                if component == "main":
                    key = source
                else:
                    key = "%s/%s" % (source, component)
                all_broken.setdefault(key, set()).add(pp_deps(dep))
    if all_broken:
        if cruft:
            print " - broken Build-Depends:"
        else:
            print "# Broken Build-Depends:"
1849 for source, bdeps in sorted(all_broken.items()):
1850 bdeps = sorted(bdeps)
            if cruft:
                print ' %s: %s' % (source, bdeps[0])
            else:
                print '%s: %s' % (source, bdeps[0])
1855 for bdep in bdeps[1:]:
                if cruft:
                    print ' ' + ' ' * (len(source) + 2) + bdep
                else:
                    print ' ' * (len(source) + 2) + bdep
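
# Usage sketch (illustrative; the package name is hypothetical):
#   check_reverse_depends(["obsolete-pkg"], "unstable", session=DBConn().session())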