# vim:set et ts=4 sw=4:

"""Utility functions

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
@license: GNU General Public License version 2 or later
"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
import commands
import codecs
import datetime
import email as modemail
import errno
import grp
import ldap
import os
import pwd
import re
import select
import shutil
import socket
import stat
import subprocess
import sys
import tempfile
import time

import apt_inst
import apt_pkg

import daklib.config as config
import daklib.daksubprocess
from dbconn import DBConn, get_architecture, get_component, get_suite, \
                   get_override_type, Keyring, session_wrapper, \
                   get_active_keyring_paths, get_primary_keyring_path, \
                   get_suite_architectures, get_or_set_metadatakey, DBSource, \
                   Component, Override, OverrideType
from sqlalchemy import desc
from dak_exceptions import *
from gpg import SignedFile
from textutils import fix_maintainer
from regexes import re_html_escaping, html_escaping, re_single_line_field, \
                    re_multi_line_field, re_srchasver, re_taint_free, \
                    re_re_mark, re_whitespace_comment, re_issource, \
                    re_is_orig_source, re_build_dep_arch, re_parse_maintainer
from formats import parse_format, validate_changes_format
from srcformats import get_format_from_string
from collections import defaultdict
################################################################################

default_config = "/etc/dak/dak.conf"     #: default dak config, defines host properties

alias_cache = None        #: Cache for email alias checks
key_uid_email_cache = {}  #: Cache for email addresses from gpg key uids

# (hashname, function, earliest_changes_version)
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
                ("sha256", apt_pkg.sha256sum, (1, 8))]  #: hashes we accept for entries in .changes/.dsc
# Monkeypatch commands.getstatusoutput as it may not return the correct exit
# code in lenny's Python. This also affects commands.getoutput and
# commands.getstatus, which are implemented on top of it.
def dak_getstatusoutput(cmd):
    pipe = daklib.daksubprocess.Popen(cmd, shell=True, universal_newlines=True,
        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

    output = pipe.stdout.read()

    pipe.wait()

    if output[-1:] == '\n':
        output = output[:-1]

    ret = pipe.wait()
    if ret is None:
        ret = 0

    return ret, output

commands.getstatusoutput = dak_getstatusoutput
################################################################################

def html_escape(s):
    """ Escape html chars """
    return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
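# Illustrative behaviour, assuming re_html_escaping/html_escaping from
# daklib.regexes cover the usual '&', '<' and '>' entities:
#
#     >>> html_escape("1 < 2 & 3 > 2")
#     '1 &lt; 2 &amp; 3 &gt; 2'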
################################################################################

def open_file(filename, mode='r'):
    """
    Open C{filename}, return fileobject.

    @type filename: string
    @param filename: path/filename to open

    @type mode: string
    @param mode: open mode

    @rtype: fileobject
    @return: open fileobject

    @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
    """
    try:
        f = open(filename, mode)
    except IOError:
        raise CantOpenError(filename)
    return f
################################################################################

def our_raw_input(prompt=""):
    if prompt:
        sys.stdout.write(prompt)
    sys.stdout.flush()
    try:
        return raw_input()
    except EOFError:
        sys.stderr.write("\nUser interrupt (^D).\n")
        raise SystemExit
################################################################################

def extract_component_from_section(section, session=None):
    component = ""

    if section.find('/') != -1:
        component = section.split('/')[0]

    # Expand default component
    if component == "":
        comp = get_component(section, session)
        if comp is None:
            component = "main"
        else:
            component = comp.component_name

    return (section, component)
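# A sketch of the expected results (the second case assumes the bare section
# name does not itself match a component row in the database):
#
#     >>> extract_component_from_section("non-free/admin")
#     ('non-free/admin', 'non-free')
#     >>> extract_component_from_section("admin")
#     ('admin', 'main')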
################################################################################

def parse_deb822(armored_contents, signing_rules=0, keyrings=None, session=None):
    require_signature = True
    if keyrings is None:
        keyrings = []
        require_signature = False

    signed_file = SignedFile(armored_contents, keyrings=keyrings, require_signature=require_signature)
    contents = signed_file.contents

    error = ""
    changes = {}

    # Split the lines in the input, keeping the linebreaks.
    lines = contents.splitlines(True)

    if len(lines) == 0:
        raise ParseChangesError("[Empty changes file]")

    # Reindex by line number so we can easily verify the format of
    # .dsc files...
    index = 0
    indexed_lines = {}
    for line in lines:
        index += 1
        indexed_lines[index] = line[:-1]

    num_of_lines = len(indexed_lines.keys())
    index = 0
    first = -1
    while index < num_of_lines:
        index += 1
        line = indexed_lines[index]
        if line == "" and signing_rules == 1:
            if index != num_of_lines:
                raise InvalidDscError(index)
            break
        slf = re_single_line_field.match(line)
        if slf:
            field = slf.groups()[0].lower()
            changes[field] = slf.groups()[1]
            first = 1
            continue
        if line == " .":
            changes[field] += '\n'
            continue
        mlf = re_multi_line_field.match(line)
        if mlf:
            if first == -1:
                raise ParseChangesError("'%s'\n [Multi-line field continuing on from nothing?]" % (line))
            if first == 1 and changes[field] != "":
                changes[field] += '\n'
            first = 0
            changes[field] += mlf.groups()[0] + '\n'
            continue
        error += line

    changes["filecontents"] = armored_contents

    if changes.has_key("source"):
        # Strip the source version in brackets from the source field,
        # put it in the "source-version" field instead.
        srcver = re_srchasver.search(changes["source"])
        if srcver:
            changes["source"] = srcver.group(1)
            changes["source-version"] = srcver.group(2)

    if error:
        raise ParseChangesError(error)

    return changes
################################################################################

def parse_changes(filename, signing_rules=0, dsc_file=0, keyrings=None):
    """
    Parses a changes file and returns a dictionary where each field is a
    key.  The mandatory first argument is the filename of the .changes
    file.

    signing_rules is an optional argument:

      - If signing_rules == -1, no signature is required.
      - If signing_rules == 0 (the default), a signature is required.
      - If signing_rules == 1, it turns on the same strict format checking
        as dpkg-source.

    The rules for (signing_rules == 1)-mode are:

      - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
        followed by any PGP header data and must end with a blank line.

      - The data section must end with a blank line and must be followed by
        "-----BEGIN PGP SIGNATURE-----".
    """

    with open_file(filename) as changes_in:
        content = changes_in.read()
    try:
        unicode(content, 'utf-8')
    except UnicodeError:
        raise ChangesUnicodeError("Changes file not proper utf-8")
    changes = parse_deb822(content, signing_rules, keyrings=keyrings)

    if not dsc_file:
        # Finally ensure that everything needed for .changes is there
        must_keywords = ('Format', 'Date', 'Source', 'Binary', 'Architecture', 'Version',
                         'Distribution', 'Maintainer', 'Description', 'Changes', 'Files')

        missingfields = []
        for keyword in must_keywords:
            if not changes.has_key(keyword.lower()):
                missingfields.append(keyword)

        if len(missingfields):
            raise ParseChangesError("Missing mandatory field(s) in changes file (policy 5.5): %s" % (missingfields))

    return changes
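# Illustrative usage (the filename is hypothetical; parse_changes raises
# ParseChangesError or ChangesUnicodeError on malformed input):
#
#     changes = parse_changes("hello_2.10-2_amd64.changes", signing_rules=-1)
#     print changes["source"], changes["version"]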
################################################################################

def hash_key(hashname):
    return '%ssum' % hashname
################################################################################

def create_hash(where, files, hashname, hashfunc):
    """
    create_hash extends the passed files dict with the given hash by
    iterating over all files on disk and passing them to the hashing
    function given.
    """

    rejmsg = []
    for f in files.keys():
        try:
            file_handle = open_file(f)
        except CantOpenError:
            rejmsg.append("Could not open file %s for checksumming" % (f))
            continue

        files[f][hash_key(hashname)] = hashfunc(file_handle)

        file_handle.close()
    return rejmsg
################################################################################

def check_hash(where, files, hashname, hashfunc):
    """
    check_hash checks the given hash in the files dict against the actual
    files on disk.  The hash values need to be present consistently in
    all file entries.  It does not modify its input in any way.
    """

    rejmsg = []
    for f in files.keys():
        try:
            with open_file(f) as file_handle:
                # Check for the hash entry, to not trigger a KeyError.
                if not files[f].has_key(hash_key(hashname)):
                    rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
                        where))
                    continue

                # Actually check the hash for correctness.
                if hashfunc(file_handle) != files[f][hash_key(hashname)]:
                    rejmsg.append("%s: %s check failed in %s" % (f, hashname,
                        where))
        except CantOpenError:
            # TODO: This happens when the file is in the pool.
            # warn("Cannot open file %s" % f)
            continue
    return rejmsg
################################################################################

def check_size(where, files):
    """
    check_size checks the file sizes in the passed files dict against the
    files on disk.
    """

    rejmsg = []
    for f in files.keys():
        try:
            entry = os.stat(f)
        except OSError as exc:
            if exc.errno == errno.ENOENT:
                # TODO: This happens when the file is in the pool.
                continue
            raise

        actual_size = entry[stat.ST_SIZE]
        size = int(files[f]["size"])
        if size != actual_size:
            rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
                          % (f, actual_size, size, where))
    return rejmsg
################################################################################

def check_dsc_files(dsc_filename, dsc, dsc_files):
    """
    Verify that the files listed in the Files field of the .dsc are
    those expected given the announced Format.

    @type dsc_filename: string
    @param dsc_filename: path of .dsc file

    @type dsc: dict
    @param dsc: the content of the .dsc parsed by C{parse_changes()}

    @type dsc_files: dict
    @param dsc_files: the file list returned by C{build_file_list()}

    @rtype: list
    @return: all errors detected
    """
    rejmsg = []

    # Ensure .dsc lists proper set of source files according to the format
    # announced
    has = defaultdict(lambda: 0)

    ftype_lookup = (
        (r'orig.tar.gz',               ('orig_tar_gz', 'orig_tar')),
        (r'diff.gz',                   ('debian_diff',)),
        (r'tar.gz',                    ('native_tar_gz', 'native_tar')),
        (r'debian\.tar\.(gz|bz2|xz)',  ('debian_tar',)),
        (r'orig\.tar\.(gz|bz2|xz)',    ('orig_tar',)),
        (r'tar\.(gz|bz2|xz)',          ('native_tar',)),
        (r'orig-.+\.tar\.(gz|bz2|xz)', ('more_orig_tar',)),
    )

    for f in dsc_files:
        m = re_issource.match(f)
        if not m:
            rejmsg.append("%s: %s in Files field not recognised as source."
                          % (dsc_filename, f))
            continue

        # Populate 'has' dictionary by resolving keys in lookup table
        matched = False
        for regex, keys in ftype_lookup:
            if re.match(regex, m.group(3)):
                matched = True
                for key in keys:
                    has[key] += 1
                break

        # File does not match anything in lookup table; reject
        if not matched:
            rejmsg.append("%s: unexpected source file '%s'" % (dsc_filename, f))

    # Check for multiple files
    for file_type in ('orig_tar', 'native_tar', 'debian_tar', 'debian_diff'):
        if has[file_type] > 1:
            rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))

    # Source format specific tests
    try:
        format = get_format_from_string(dsc['format'])
        rejmsg.extend([
            '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)
        ])
    except UnknownFormatError:
        # Not an error here for now
        pass

    return rejmsg
################################################################################

def check_hash_fields(what, manifest):
    """
    check_hash_fields ensures that there are no checksum fields in the
    given dict that we do not know about.
    """

    rejmsg = []
    hashes = map(lambda x: x[0], known_hashes)
    for field in manifest:
        if field.startswith("checksums-"):
            hashname = field.split("-", 1)[1]
            if hashname not in hashes:
                rejmsg.append("Unsupported checksum field for %s "\
                    "in %s" % (hashname, what))
    return rejmsg
################################################################################

def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
    if format >= version:
        # The version should contain the specified hash.
        func = check_hash

        # Import hashes from the changes
        rejmsg = parse_checksums(".changes", files, changes, hashname)
        if len(rejmsg) > 0:
            return rejmsg
    else:
        # We need to calculate the hash because it can't possibly
        # be in the file.
        func = create_hash
    return func(".changes", files, hashname, hashfunc)

# We could add the orig which might be in the pool to the files dict to
# access the checksums easily.
def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
    """
    ensure_dsc_hashes' task is to ensure that each and every *present* hash
    in the dsc is correct, i.e. identical to the changes file and if necessary
    the pool.  The latter task is delegated to check_hash.
    """

    rejmsg = []
    if not dsc.has_key('Checksums-%s' % (hashname,)):
        return rejmsg
    # Import hashes from the dsc
    parse_checksums(".dsc", dsc_files, dsc, hashname)
    # And check it...
    rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
    return rejmsg
################################################################################

def parse_checksums(where, files, manifest, hashname):
    rejmsg = []
    field = 'checksums-%s' % hashname
    if not field in manifest:
        return rejmsg
    for line in manifest[field].split('\n'):
        if not line:
            break
        clist = line.strip().split(' ')
        if len(clist) == 3:
            checksum, size, checkfile = clist
        else:
            rejmsg.append("Cannot parse checksum line [%s]" % (line))
            continue
        if not files.has_key(checkfile):
            # TODO: check for the file's entry in the original files dict, not
            # the one modified by (auto)byhand and other weird stuff
            # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
            #     (file, hashname, where))
            continue
        if not files[checkfile]["size"] == size:
            rejmsg.append("%s: size differs for files and checksums-%s entry "\
                "in %s" % (checkfile, hashname, where))
            continue
        files[checkfile][hash_key(hashname)] = checksum
    for f in files.keys():
        if not files[f].has_key(hash_key(hashname)):
            rejmsg.append("%s: no entry in checksums-%s in %s" % (f, hashname, where))
    return rejmsg
################################################################################

# Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl

def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
    files = {}

    # Make sure we have a Files: field to parse...
    if not changes.has_key(field):
        raise NoFilesFieldError

    # Validate .changes Format: field
    if not is_a_dsc:
        validate_changes_format(parse_format(changes['format']), field)

    includes_section = (not is_a_dsc) and field == "files"

    # Parse each entry/line:
    for i in changes[field].split('\n'):
        if not i:
            break
        s = i.split()
        section = priority = ""
        try:
            if includes_section:
                (md5, size, section, priority, name) = s
            else:
                (md5, size, name) = s
        except ValueError:
            raise ParseChangesError(i)

        if section == "":
            section = "-"
        if priority == "":
            priority = "-"

        (section, component) = extract_component_from_section(section)

        files[name] = dict(size=size, section=section,
                           priority=priority, component=component)
        files[name][hashname] = md5

    return files
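# Sketch of the expected input: each entry of a .changes Files field has the
# form "<md5> <size> <section> <priority> <filename>" (values below made up):
#
#     changes = {'format': '1.8',
#                'files': 'deadbeef 1234 utils optional dak_1.0_all.deb'}
#     files = build_file_list(changes)
#     # files['dak_1.0_all.deb'] ->
#     #   {'size': '1234', 'section': 'utils', 'priority': 'optional',
#     #    'component': 'main', 'md5sum': 'deadbeef'}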
################################################################################

# see http://bugs.debian.org/619131
def build_package_list(dsc, session=None):
    if not dsc.has_key("package-list"):
        return {}

    packages = {}

    for line in dsc["package-list"].split("\n"):
        if not line:
            break

        fields = line.split()
        name = fields[0]
        package_type = fields[1]
        (section, component) = extract_component_from_section(fields[2])
        priority = fields[3]

        # Validate type if we have a session
        if session and get_override_type(package_type, session) is None:
            # Maybe just warn and ignore? exit(1) might be a bit hard...
            fubar("invalid type (%s) in Package-List." % (package_type))

        if name not in packages or packages[name]["type"] == "dsc":
            packages[name] = dict(priority=priority, section=section, type=package_type, component=component, files=[])

    return packages
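# Sketch of the expected input: each Package-List line has the form
# "<name> <type> <section> <priority>", e.g. (made-up values):
#
#     dsc = {'package-list': 'dak deb utils optional'}
#     build_package_list(dsc)
#     # -> {'dak': {'priority': 'optional', 'section': 'utils',
#     #             'type': 'deb', 'component': 'main', 'files': []}}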
################################################################################

def send_mail(message, filename="", whitelists=None):
    """sendmail wrapper, takes _either_ a message string or a file as arguments

    @type  whitelists: list of (str or None)
    @param whitelists: path to whitelists. C{None} or an empty list whitelists
                       everything, otherwise an address is whitelisted if it is
                       included in any of the lists.
                       In addition a global whitelist can be specified in
                       Dinstall::MailWhiteList.
    """

    maildir = Cnf.get('Dir::Mail')
    if maildir:
        path = os.path.join(maildir, datetime.datetime.now().isoformat())
        path = find_next_free(path)
        with open(path, 'w') as fh:
            print >>fh, message,

    # Check whether we're supposed to be sending mail
    if Cnf.has_key("Dinstall::Options::No-Mail") and Cnf["Dinstall::Options::No-Mail"]:
        return

    # If we've been passed a string dump it into a temporary file
    if message:
        (fd, filename) = tempfile.mkstemp()
        os.write(fd, message)
        os.close(fd)

    if whitelists is None or None in whitelists:
        whitelists = []
    if Cnf.get('Dinstall::MailWhiteList', ''):
        whitelists.append(Cnf['Dinstall::MailWhiteList'])
    if len(whitelists) != 0:
        with open_file(filename) as message_in:
            message_raw = modemail.message_from_file(message_in)

        whitelist = []
        for path in whitelists:
            with open_file(path, 'r') as whitelist_in:
                for line in whitelist_in:
                    if not re_whitespace_comment.match(line):
                        if re_re_mark.match(line):
                            whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
                        else:
                            whitelist.append(re.compile(re.escape(line.strip())))

        # Fields to check.
        fields = ["To", "Bcc", "Cc"]
        for field in fields:
            # Check each field
            value = message_raw.get(field, None)
            if value != None:
                match = []
                for item in value.split(","):
                    (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
                    mail_whitelisted = 0
                    for wr in whitelist:
                        if wr.match(email):
                            mail_whitelisted = 1
                            break
                    if not mail_whitelisted:
                        print "Skipping {0} since it's not whitelisted".format(item)
                        continue
                    match.append(item)

                # Doesn't have any mail in whitelist so remove the header
                if len(match) == 0:
                    del message_raw[field]
                else:
                    message_raw.replace_header(field, ', '.join(match))

        # Change message fields in order if we don't have a To header
        if not message_raw.has_key("To"):
            fields.reverse()
            for field in fields:
                if message_raw.has_key(field):
                    message_raw[fields[-1]] = message_raw[field]
                    del message_raw[field]
                    break
            else:
                # Clean up any temporary files
                # and return, as we removed all recipients.
                if message:
                    os.unlink(filename)
                return

        fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0o700)
        os.write(fd, message_raw.as_string(True))
        os.close(fd)

    # Invoke sendmail
    (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
    if (result != 0):
        raise SendmailFailedError(output)

    # Clean up any temporary files
    if message:
        os.unlink(filename)
################################################################################

def poolify(source, component=None):
    # 'component' is unused; kept for backwards compatibility with callers
    if source[:3] == "lib":
        return source[:4] + '/' + source + '/'
    else:
        return source[:1] + '/' + source + '/'
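# Expected behaviour (straight from the code above):
#
#     >>> poolify("libdak")
#     'libd/libdak/'
#     >>> poolify("dak")
#     'd/dak/'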
################################################################################

def move(src, dest, overwrite=0, perms=0o664):
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = dest
    else:
        dest_dir = os.path.dirname(dest)
    if not os.path.lexists(dest_dir):
        umask = os.umask(0o000)
        os.makedirs(dest_dir, 0o2775)
        os.umask(umask)
    #print "Moving %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.lexists(dest):
        if not overwrite:
            fubar("Can't move %s to %s - file already exists." % (src, dest))
        else:
            if not os.access(dest, os.W_OK):
                fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
    os.unlink(src)
def copy(src, dest, overwrite=0, perms=0o664):
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = dest
    else:
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        umask = os.umask(0o000)
        os.makedirs(dest_dir, 0o2775)
        os.umask(umask)
    #print "Copying %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.lexists(dest):
        if not overwrite:
            raise FileExistsError
        else:
            if not os.access(dest, os.W_OK):
                raise CantOverwriteError
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
################################################################################

def which_conf_file():
    if os.getenv('DAK_CONFIG'):
        return os.getenv('DAK_CONFIG')

    res = socket.getfqdn()
    # In case we allow local config files per user, try if one exists
    if Cnf.find_b("Config::" + res + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        confpath = os.path.join(homedir, "etc/dak.conf")
        if os.path.exists(confpath):
            apt_pkg.read_config_file_isc(Cnf, confpath)

    # We are still in here, so there is no local config file or we do
    # not allow local files.  Do the normal stuff.
    if Cnf.get("Config::" + res + "::DakConfig"):
        return Cnf["Config::" + res + "::DakConfig"]

    return default_config
################################################################################

def TemplateSubst(subst_map, filename):
    """ Perform a substitution of template """
    with open_file(filename) as templatefile:
        template = templatefile.read()
    for k, v in subst_map.iteritems():
        template = template.replace(k, str(v))
    return template
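# Illustrative usage (template path and substitution keys are hypothetical):
#
#     subst = {"__DAK_ADDRESS__": "ftpmaster@example.org",
#              "__SOURCE__": "dak"}
#     body = TemplateSubst(subst, "/srv/dak/templates/process-new.note")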
################################################################################

def fubar(msg, exit_code=1):
    sys.stderr.write("E: %s\n" % (msg))
    sys.exit(exit_code)

def warn(msg):
    sys.stderr.write("W: %s\n" % (msg))
################################################################################

# Returns the user name with a laughable attempt at rfc822 conformancy
# (read: removing stray periods).
def whoami():
    return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')

def getusername():
    return pwd.getpwuid(os.getuid())[0]
################################################################################

def size_type(c):
    t = " B"
    if c > 10240:
        c = c / 1024
        t = " KB"
    if c > 10240:
        c = c / 1024
        t = " MB"
    return ("%d%s" % (c, t))
################################################################################

def cc_fix_changes(changes):
    o = changes.get("architecture", "")
    if o != "":
        del changes["architecture"]
    changes["architecture"] = {}
    for j in o.split():
        changes["architecture"][j] = 1
def changes_compare(a, b):
    """ Sort by source name, source version, 'have source', and then by filename """
    try:
        a_changes = parse_changes(a)
    except:
        return -1

    try:
        b_changes = parse_changes(b)
    except:
        return 1

    cc_fix_changes(a_changes)
    cc_fix_changes(b_changes)

    # Sort by source name
    a_source = a_changes.get("source")
    b_source = b_changes.get("source")
    q = cmp(a_source, b_source)
    if q:
        return q

    # Sort by source version
    a_version = a_changes.get("version", "0")
    b_version = b_changes.get("version", "0")
    q = apt_pkg.version_compare(a_version, b_version)
    if q:
        return q

    # Sort by 'have source'
    a_has_source = a_changes["architecture"].get("source")
    b_has_source = b_changes["architecture"].get("source")
    if a_has_source and not b_has_source:
        return -1
    elif b_has_source and not a_has_source:
        return 1

    # Fall back to sort by filename
    return cmp(a, b)
################################################################################

def find_next_free(dest, too_many=100):
    extra = 0
    orig_dest = dest
    while os.path.lexists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
        extra += 1
    if extra >= too_many:
        raise NoFreeFilenameError
    return dest
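# Illustrative behaviour: if "/tmp/mail" already exists, the first free
# candidate among "/tmp/mail.0", "/tmp/mail.1", ... is returned; after
# 'too_many' attempts NoFreeFilenameError is raised.
#
#     path = find_next_free("/tmp/mail")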
################################################################################

def result_join(original, sep='\t'):
    resultlist = []
    for i in xrange(len(original)):
        if original[i] == None:
            resultlist.append("")
        else:
            resultlist.append(original[i])
    return sep.join(resultlist)
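# Expected behaviour (None entries become empty strings):
#
#     >>> result_join(["dak", None, "1.0"], sep='|')
#     'dak||1.0'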
################################################################################

def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    out = ""
    for line in str.split('\n'):
        line = line.strip()
        if line or include_blank_lines:
            out += "%s%s\n" % (prefix, line)
    # Strip trailing new line
    if out:
        out = out[:-1]
    return out
################################################################################

def validate_changes_file_arg(filename, require_changes=1):
    """
    'filename' is either a .changes or .dak file.  If 'filename' is a
    .dak file, it's changed to be the corresponding .changes file.  The
    function then checks if the .changes file a) exists and b) is
    readable and returns the .changes filename if so.  If there's a
    problem, the next action depends on the option 'require_changes'
    argument:

      - If 'require_changes' == -1, errors are ignored and the .changes
        filename is returned.
      - If 'require_changes' == 0, a warning is given and 'None' is returned.
      - If 'require_changes' == 1, a fatal error is raised.
    """
    error = None

    orig_filename = filename
    if filename.endswith(".dak"):
        filename = filename[:-4]+".changes"

    if not filename.endswith(".changes"):
        error = "invalid file type; not a changes file"
    else:
        if not os.access(filename, os.R_OK):
            if os.path.exists(filename):
                error = "permission denied"
            else:
                error = "file not found"

    if error:
        if require_changes == 1:
            fubar("%s: %s." % (orig_filename, error))
        elif require_changes == 0:
            warn("Skipping %s - %s" % (orig_filename, error))
            return None
        else: # We only care about the .dak file
            return filename
    else:
        return filename
################################################################################

def real_arch(arch):
    return (arch != "source" and arch != "all")
################################################################################

def join_with_commas_and(list):
    if len(list) == 0: return "nothing"
    if len(list) == 1: return list[0]
    return ", ".join(list[:-1]) + " and " + list[-1]
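# Expected behaviour:
#
#     >>> join_with_commas_and(["apt", "dpkg", "dak"])
#     'apt, dpkg and dak'
#     >>> join_with_commas_and([])
#     'nothing'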
################################################################################

def pp_deps(deps):
    pp_deps = []
    for atom in deps:
        (pkg, version, constraint) = atom
        if constraint:
            pp_dep = "%s (%s %s)" % (pkg, constraint, version)
        else:
            pp_dep = pkg
        pp_deps.append(pp_dep)
    return " |".join(pp_deps)
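# Expected behaviour for an ORed dependency group as produced by
# apt_pkg.parse_depends, where each atom is (package, version, constraint)
# (the unconstrained branch relies on the reconstruction above):
#
#     >>> pp_deps([("dpkg", "1.15", ">="), ("install-info", "", "")])
#     'dpkg (>= 1.15) |install-info'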
################################################################################
def parse_args(Options):
    """ Handle -a, -c and -s arguments; returns them as SQL constraints """
    # XXX: This should go away and everything which calls it be converted
    #      to use SQLA properly.  For now, we'll just fix it not to use
    #      the old Pg interface though
    session = DBConn().session()

    # Process suite
    if Options["Suite"]:
        suite_ids_list = []
        for suitename in split_args(Options["Suite"]):
            suite = get_suite(suitename, session=session)
            if not suite or suite.suite_id is None:
                warn("suite '%s' not recognised." % (suite and suite.suite_name or suitename))
            else:
                suite_ids_list.append(suite.suite_id)
        if suite_ids_list:
            con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
        else:
            fubar("No valid suite given.")
    else:
        con_suites = ""

    # Process component
    if Options["Component"]:
        component_ids_list = []
        for componentname in split_args(Options["Component"]):
            component = get_component(componentname, session=session)
            if component is None:
                warn("component '%s' not recognised." % (componentname))
            else:
                component_ids_list.append(component.component_id)
        if component_ids_list:
            con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
        else:
            fubar("No valid component given.")
    else:
        con_components = ""

    # Process architecture
    con_architectures = ""
    check_source = 0
    if Options["Architecture"]:
        arch_ids_list = []
        for archname in split_args(Options["Architecture"]):
            if archname == "source":
                check_source = 1
            else:
                arch = get_architecture(archname, session=session)
                if arch is None:
                    warn("architecture '%s' not recognised." % (archname))
                else:
                    arch_ids_list.append(arch.arch_id)
        if arch_ids_list:
            con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
        else:
            if not check_source:
                fubar("No valid architecture given.")
    else:
        check_source = 1

    return (con_suites, con_architectures, con_components, check_source)
################################################################################

def arch_compare_sw(a, b):
    """
    Function for use in sorting lists of architectures.

    Sorts normally except that 'source' dominates all others.
    """

    if a == "source" and b == "source":
        return 0
    elif a == "source":
        return -1
    elif b == "source":
        return 1

    return cmp(a, b)
################################################################################

def split_args(s, dwim=1):
    """
    Split command line arguments which can be separated by either commas
    or whitespace.  If dwim is set, it will complain about strings ending
    in a comma since this usually means someone did 'dak ls -a i386, m68k
    foo' or something and the inevitable confusion resulting from 'm68k'
    being treated as an argument is undesirable.
    """

    if s.find(",") == -1:
        return s.split()
    else:
        if s[-1:] == "," and dwim:
            fubar("split_args: found trailing comma, spurious space maybe?")
        return s.split(",")
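# Expected behaviour:
#
#     >>> split_args("i386 amd64")
#     ['i386', 'amd64']
#     >>> split_args("i386,amd64")
#     ['i386', 'amd64']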
################################################################################

def gpgv_get_status_output(cmd, status_read, status_write):
    """
    Our very own version of commands.getstatusoutput(), hacked to support
    gpgv's status fd.
    """

    cmd = ['/bin/sh', '-c', cmd]
    p2cread, p2cwrite = os.pipe()
    c2pread, c2pwrite = os.pipe()
    errout, errin = os.pipe()
    pid = os.fork()
    if pid == 0:
        # Child: hook stdin/stdout/stderr up to the pipes, close every
        # other descriptor except the status fd, then exec the command.
        os.close(0)
        os.dup(p2cread)
        os.close(1)
        os.dup(c2pwrite)
        os.close(2)
        os.dup(errin)
        for i in range(3, 256):
            if i != status_write:
                try:
                    os.close(i)
                except OSError:
                    pass
        try:
            os.execvp(cmd[0], cmd)
        finally:
            os._exit(1)

    # Parent: collect command output and status-fd output until both are done
    os.close(p2cread)
    os.dup2(c2pread, c2pwrite)
    os.dup2(errout, errin)

    output = status = ""
    while True:
        i, o, e = select.select([c2pwrite, errin, status_read], [], [])
        more_data = []
        for fd in i:
            r = os.read(fd, 8196)
            if len(r) > 0:
                more_data.append(fd)
                if fd == c2pwrite or fd == errin:
                    output += r
                elif fd == status_read:
                    status += r
                else:
                    fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
        if not more_data:
            pid, exit_status = os.waitpid(pid, 0)
            for fd in (status_write, status_read, c2pread, c2pwrite,
                       p2cwrite, errin, errout):
                try:
                    os.close(fd)
                except OSError:
                    pass
            break

    return output, status, exit_status
################################################################################

def process_gpgv_output(status):
    # Process the status-fd output
    keywords = {}
    internal_error = ""
    for line in status.split('\n'):
        line = line.strip()
        if line == "":
            continue
        split = line.split()
        if len(split) < 2:
            internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
            continue
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
            continue
        args = split[2:]
        if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
            continue
        else:
            keywords[keyword] = args

    return (keywords, internal_error)
################################################################################

def retrieve_key(filename, keyserver=None, keyring=None):
    """
    Retrieve the key that signed 'filename' from 'keyserver' and
    add it to 'keyring'.  Returns nothing on success, or an error message
    on error.
    """

    # Defaults for keyserver and keyring
    if not keyserver:
        keyserver = Cnf["Dinstall::KeyServer"]
    if not keyring:
        keyring = get_primary_keyring_path()

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(filename):
        return "%s: tainted filename" % (filename)

    # Invoke gpgv on the file
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
    (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)
    if internal_error:
        return internal_error

    if not keywords.has_key("NO_PUBKEY"):
        return "didn't find expected NO_PUBKEY in gpgv status-fd output"

    fingerprint = keywords["NO_PUBKEY"][0]

    # XXX - gpg sucks.  You can't use --secret-keyring=/dev/null as
    # it'll try to create a lockfile in /dev.  A better solution might
    # be a tempfile or something.
    cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
          % (Cnf["Dinstall::SigningKeyring"])
    cmd += " --keyring %s --keyserver %s --recv-key %s" \
           % (keyring, keyserver, fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    if (result != 0):
        return "'%s' failed with exit code %s" % (cmd, result)

    return ""
################################################################################

def gpg_keyring_args(keyrings=None):
    if not keyrings:
        keyrings = get_active_keyring_paths()

    return " ".join(["--keyring %s" % x for x in keyrings])
################################################################################

def check_signature(sig_filename, data_filename="", keyrings=None, autofetch=None, session=None):
    """
    Check the signature of a file and return the fingerprint if the
    signature is valid, or 'None' if it's not, together with a list of
    reject messages.  The first argument is the filename whose signature
    should be checked.  The second argument is optional and is the name
    of the file the detached signature applies to.  The third argument is
    optional and is a *list* of keyrings to use.  'autofetch' can either
    be None, True or False.  If None, the default behaviour specified in
    the config will be used.
    """

    rejects = []

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(sig_filename):
        rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
        return (None, rejects)

    if data_filename and not re_taint_free.match(data_filename):
        rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
        return (None, rejects)

    if not keyrings:
        keyrings = [ x.keyring_name for x in session.query(Keyring).filter(Keyring.active == True).all() ]

    # Autofetch the signing key if that's enabled
    if autofetch == None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    if autofetch:
        error_msg = retrieve_key(sig_filename)
        if error_msg:
            rejects.append(error_msg)
            return (None, rejects)

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    if internal_error:
        rejects.append("internal error while performing signature check on %s." % (sig_filename))
        rejects.append(internal_error)
        rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.")
        return (None, rejects)

    # Now check for obviously bad things in the processed output
    if keywords.has_key("KEYREVOKED"):
        rejects.append("The key used to sign %s has been revoked." % (sig_filename))
    if keywords.has_key("BADSIG"):
        rejects.append("bad signature on %s." % (sig_filename))
    if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
        rejects.append("failed to check signature on %s." % (sig_filename))
    if keywords.has_key("NO_PUBKEY"):
        args = keywords["NO_PUBKEY"]
        key = "UNKNOWN"
        if len(args) >= 1:
            key = args[0]
        rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if keywords.has_key("BADARMOR"):
        rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if keywords.has_key("NODATA"):
        rejects.append("no signature found in %s." % (sig_filename))
    if keywords.has_key("EXPKEYSIG"):
        args = keywords["EXPKEYSIG"]
        key = "UNKNOWN"
        if len(args) >= 1:
            key = args[0]
        rejects.append("Signature made by expired key 0x%s" % (key))
    if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
        args = keywords["KEYEXPIRED"]
        expiredate = ""
        if len(args) >= 1:
            timestamp = args[0]
            if timestamp.count("T") == 0:
                try:
                    expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
                except ValueError:
                    expiredate = "unknown (%s)" % (timestamp)
            else:
                expiredate = timestamp
        rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))

    if len(rejects) > 0:
        return (None, rejects)

    # Next check gpgv exited with a zero return code
    if exit_status:
        rejects.append("gpgv failed while checking %s." % (sig_filename))
        if status.strip():
            rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "))
        else:
            rejects.append(prefix_multi_line_string(output, " [GPG output:] "))
        return (None, rejects)

    # Sanity check the good stuff we expect
    fingerprint = None
    if not keywords.has_key("VALIDSIG"):
        rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
    else:
        args = keywords["VALIDSIG"]
        if len(args) < 1:
            rejects.append("internal error while checking signature on %s." % (sig_filename))
        else:
            fingerprint = args[0]
    if not keywords.has_key("GOODSIG"):
        rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if not keywords.has_key("SIG_ID"):
        rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

    # Finally ensure there's not something we don't recognise
    known_keywords = dict(VALIDSIG="", SIG_ID="", GOODSIG="", BADSIG="", ERRSIG="",
                          SIGEXPIRED="", KEYREVOKED="", NO_PUBKEY="", BADARMOR="",
                          NODATA="", NOTATION_DATA="", NOTATION_NAME="", KEYEXPIRED="", POLICY_URL="")

    for keyword in keywords.keys():
        if not known_keywords.has_key(keyword):
            rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))

    if len(rejects) > 0:
        return (None, rejects)

    return (fingerprint, [])
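# Illustrative usage (filename is hypothetical; on success the fingerprint
# is returned together with an empty reject list):
#
#     (fingerprint, rejects) = check_signature("foo_1.0_amd64.changes")
#     if fingerprint is None:
#         for msg in rejects:
#             warn(msg)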
################################################################################

def gpg_get_key_addresses(fingerprint):
    """retrieve email addresses from gpg key uids for a given fingerprint"""
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses != None:
        return addresses

    addresses = []
    try:
        with open(os.devnull, "wb") as devnull:
            output = daklib.daksubprocess.check_output(
                ["gpg", "--no-default-keyring"] + gpg_keyring_args().split() +
                ["--with-colons", "--list-keys", fingerprint], stderr=devnull)
    except subprocess.CalledProcessError:
        pass
    else:
        for l in output.split('\n'):
            parts = l.split(':')
            if parts[0] not in ("uid", "pub"):
                continue
            try:
                uid = parts[9]
            except IndexError:
                continue
            try:
                # If the uid is in UTF-8, decode it; gpg escapes non-ASCII
                # characters, so decode the escape sequences as well
                uid = codecs.decode(uid.decode("utf-8"), "unicode_escape")
            except UnicodeDecodeError:
                uid = uid.decode("latin1") # does not fail
            m = re_parse_maintainer.match(uid)
            if not m:
                continue
            address = m.group(2)
            address = address.encode("utf8") # dak still uses bytes
            if address.endswith('@debian.org'):
                # prefer @debian.org addresses
                # TODO: maybe not hardcode the domain
                addresses.insert(0, address)
            else:
                addresses.append(address)

    key_uid_email_cache[fingerprint] = addresses
    return addresses
################################################################################

def get_logins_from_ldap(fingerprint='*'):
    """retrieve logins from LDAP linked to a given fingerprint"""

    LDAPDn = Cnf['Import-LDAP-Fingerprints::LDAPDn']
    LDAPServer = Cnf['Import-LDAP-Fingerprints::LDAPServer']
    l = ldap.open(LDAPServer)
    l.simple_bind_s('', '')
    Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
                       '(keyfingerprint=%s)' % fingerprint,
                       ['uid', 'keyfingerprint'])
    login = {}
    for elem in Attrs:
        login[elem[1]['keyFingerPrint'][0]] = elem[1]['uid'][0]

    return login
################################################################################

def get_users_from_ldap():
    """retrieve login and user names from LDAP"""

    LDAPDn = Cnf['Import-LDAP-Fingerprints::LDAPDn']
    LDAPServer = Cnf['Import-LDAP-Fingerprints::LDAPServer']
    l = ldap.open(LDAPServer)
    l.simple_bind_s('', '')
    Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
                       '(uid=*)', ['uid', 'cn', 'mn', 'sn'])
    users = {}
    for elem in Attrs:
        elem = elem[1]
        name = []
        for k in ('cn', 'mn', 'sn'):
            try:
                if elem[k][0] != '-':
                    name.append(elem[k][0])
            except KeyError:
                pass
        users[' '.join(name)] = elem['uid'][0]

    return users
################################################################################

def clean_symlink(src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
    Returns fixed 'src'
    """
    src = src.replace(root, '', 1)
    dest = dest.replace(root, '', 1)
    dest = os.path.dirname(dest)
    new_src = '../' * len(dest.split('/'))
    return new_src + src
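# Expected behaviour (paths made up):
#
#     >>> clean_symlink("/srv/ftp/pool/main/d/dak/a.tar.gz",
#     ...               "/srv/ftp/dists/sid/a.tar.gz", "/srv/ftp/")
#     '../../pool/main/d/dak/a.tar.gz'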
################################################################################

def temp_filename(directory=None, prefix="dak", suffix="", mode=None, group=None):
    """
    Return a secure and unique filename by pre-creating it.

    @type directory: str
    @param directory: If non-null it will be the directory the file is pre-created in.

    @type prefix: str
    @param prefix: The filename will be prefixed with this string

    @type suffix: str
    @param suffix: The filename will end with this string

    @type mode: str
    @param mode: If set the file will get chmodded to those permissions

    @type group: str
    @param group: If set the file will get chgrped to the specified group.

    @rtype: list
    @return: Returns a pair (fd, name)
    """

    (tfd, tfname) = tempfile.mkstemp(suffix, prefix, directory)
    if mode:
        os.chmod(tfname, mode)
    if group:
        gid = grp.getgrnam(group).gr_gid
        os.chown(tfname, -1, gid)
    return (tfd, tfname)
################################################################################

def temp_dirname(parent=None, prefix="dak", suffix="", mode=None, group=None):
    """
    Return a secure and unique directory by pre-creating it.

    @type parent: str
    @param parent: If non-null it will be the directory the directory is pre-created in.

    @type prefix: str
    @param prefix: The directory name will be prefixed with this string

    @type suffix: str
    @param suffix: The directory name will end with this string

    @type mode: str
    @param mode: If set the directory will get chmodded to those permissions

    @type group: str
    @param group: If set the directory will get chgrped to the specified group.

    @rtype: str
    @return: Returns the name of the directory
    """

    tfname = tempfile.mkdtemp(suffix, prefix, parent)
    if mode:
        os.chmod(tfname, mode)
    if group:
        gid = grp.getgrnam(group).gr_gid
        os.chown(tfname, -1, gid)
    return tfname
################################################################################

def is_email_alias(email):
    """ checks if the user part of the email is listed in the alias file """
    global alias_cache
    if alias_cache == None:
        aliasfn = which_alias_file()
        alias_cache = set()
        if aliasfn:
            for l in open(aliasfn):
                alias_cache.add(l.split(':')[0])
    uid = email.split('@')[0]
    return uid in alias_cache
################################################################################

def get_changes_files(from_dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place)

    Returns a list of filenames
    """
    try:
        # Much of the rest of p-u/p-a depends on being in the right place
        os.chdir(from_dir)
        changes_files = [x for x in os.listdir(from_dir) if x.endswith('.changes')]
    except OSError as e:
        fubar("Failed to read list from directory %s (%s)" % (from_dir, e))

    return changes_files
################################################################################

Cnf = config.Config().Cnf
################################################################################

def parse_wnpp_bug_file(file="/srv/ftp-master.debian.org/scripts/masterfiles/wnpp_rm"):
    """
    Parses the wnpp bug list available at http://qa.debian.org/data/bts/wnpp_rm
    Well, actually it parses a local copy, but let's document the source
    somewhere ;)

    returns a dict associating source package name with a list of open wnpp
    bugs (Yes, there might be more than one)
    """

    try:
        f = open(file)
        lines = f.readlines()
    except IOError as e:
        print "Warning: Couldn't open %s; don't know about WNPP bugs, so won't close any." % file
        lines = []
    wnpp = {}

    for line in lines:
        split_line = line.split(": ", 1)
        if len(split_line) > 1:
            wnpp[split_line[0]] = split_line[1].split("|")

    for source in wnpp.keys():
        bugs = []
        for wnpp_bug in wnpp[source]:
            bug_no = re.search("(\d)+", wnpp_bug).group()
            if bug_no:
                bugs.append(bug_no)
        wnpp[source] = bugs
    return wnpp
################################################################################

def get_packages_from_ftp(root, suite, component, architecture):
    """
    Returns an object containing apt_pkg-parseable data collected by
    aggregating Packages.gz files gathered for each architecture.

    @type root: string
    @param root: path to ftp archive root directory

    @type suite: string
    @param suite: suite to extract files from

    @type component: string
    @param component: component to extract files from

    @type architecture: string
    @param architecture: architecture to extract files from

    @rtype: TagFile
    @return: apt_pkg class containing package data
    """
    filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (root, suite, component, architecture)
    (fd, temp_file) = temp_filename()
    (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_file))
    if (result != 0):
        fubar("Gunzip invocation failed!\n%s\n" % (output), result)
    filename = "%s/dists/%s/%s/debian-installer/binary-%s/Packages.gz" % (root, suite, component, architecture)
    if os.path.exists(filename):
        (result, output) = commands.getstatusoutput("gunzip -c %s >> %s" % (filename, temp_file))
        if (result != 0):
            fubar("Gunzip invocation failed!\n%s\n" % (output), result)
    packages = open_file(temp_file)
    Packages = apt_pkg.TagFile(packages)
    os.unlink(temp_file)
    return Packages
################################################################################

def deb_extract_control(fh):
    """extract DEBIAN/control from a binary package"""
    return apt_inst.DebFile(fh).control.extractdata("control")
################################################################################

def mail_addresses_for_upload(maintainer, changed_by, fingerprint):
    """mail addresses to contact for an upload

    @type  maintainer: str
    @param maintainer: Maintainer field of the .changes file

    @type  changed_by: str
    @param changed_by: Changed-By field of the .changes file

    @type  fingerprint: str
    @param fingerprint: fingerprint of the key used to sign the upload

    @rtype:  list of str
    @return: list of RFC 2047-encoded mail addresses to contact regarding
             this upload
    """
    addresses = [maintainer]
    if changed_by != maintainer:
        addresses.append(changed_by)

    fpr_addresses = gpg_get_key_addresses(fingerprint)
    if len(fpr_addresses) > 0 and fix_maintainer(changed_by)[3] not in fpr_addresses and fix_maintainer(maintainer)[3] not in fpr_addresses:
        addresses.append(fpr_addresses[0])

    encoded_addresses = [ fix_maintainer(e)[1] for e in addresses ]
    return encoded_addresses
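# Illustrative usage (addresses are made up; the key-uid address is only
# appended when neither field already matches one of the key's uids):
#
#     mail_addresses_for_upload("Foo Bar <foo@example.org>",
#                               "Baz Quux <baz@example.org>",
#                               "ABCD1234ABCD1234ABCD1234ABCD1234ABCD1234")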
################################################################################

def call_editor(text="", suffix=".txt"):
    """run editor and return the result as a string

    @type  text: str
    @param text: initial text

    @type  suffix: str
    @param suffix: extension for temporary file

    @rtype:  str
    @return: string with the edited text
    """
    editor = os.environ.get('VISUAL', os.environ.get('EDITOR', 'vi'))
    tmp = tempfile.NamedTemporaryFile(suffix=suffix, delete=False)
    try:
        print >>tmp, text,
        tmp.close()
        daklib.daksubprocess.check_call([editor, tmp.name])
        return open(tmp.name, 'r').read()
    finally:
        os.unlink(tmp.name)
################################################################################

def check_reverse_depends(removals, suite, arches=None, session=None, cruft=False):
    dbsuite = get_suite(suite, session)
    overridesuite = dbsuite
    if dbsuite.overridesuite is not None:
        overridesuite = get_suite(dbsuite.overridesuite, session)
    dep_problem = 0
    p2c = {}
    all_broken = {}
    if arches:
        all_arches = set(arches)
    else:
        all_arches = set([x.arch_string for x in get_suite_architectures(suite)])
    all_arches -= set(["source", "all"])
    metakey_d = get_or_set_metadatakey("Depends", session)
    metakey_p = get_or_set_metadatakey("Provides", session)
    params = {
        'suite_id':     dbsuite.suite_id,
        'metakey_d_id': metakey_d.key_id,
        'metakey_p_id': metakey_p.key_id,
    }
    for architecture in all_arches | set(['all']):
        deps = {}
        sources = {}
        virtual_packages = {}
        params['arch_id'] = get_architecture(architecture, session).arch_id

        statement = '''
            SELECT b.id, b.package, s.source, c.name as component,
                (SELECT bmd.value FROM binaries_metadata bmd WHERE bmd.bin_id = b.id AND bmd.key_id = :metakey_d_id) AS depends,
                (SELECT bmp.value FROM binaries_metadata bmp WHERE bmp.bin_id = b.id AND bmp.key_id = :metakey_p_id) AS provides
                FROM binaries b
                JOIN bin_associations ba ON b.id = ba.bin AND ba.suite = :suite_id
                JOIN source s ON b.source = s.id
                JOIN files_archive_map af ON b.file = af.file_id
                JOIN component c ON af.component_id = c.id
                WHERE b.architecture = :arch_id'''
        query = session.query('id', 'package', 'source', 'component', 'depends', 'provides'). \
            from_statement(statement).params(params)
        for binary_id, package, source, component, depends, provides in query:
            sources[package] = source
            p2c[package] = component
            if depends is not None:
                deps[package] = depends
            # Maintain a counter for each virtual package.  If a
            # Provides: exists, set the counter to 0 and count all
            # provides by a package not in the list for removal.
            # If the counter stays 0 at the end, we know that only
            # the to-be-removed packages provided this virtual
            # package.
            if provides is not None:
                for virtual_pkg in provides.split(","):
                    virtual_pkg = virtual_pkg.strip()
                    if virtual_pkg == package:
                        continue
                    if not virtual_packages.has_key(virtual_pkg):
                        virtual_packages[virtual_pkg] = 0
                    if package not in removals:
                        virtual_packages[virtual_pkg] += 1

        # If a virtual package is only provided by the to-be-removed
        # packages, treat the virtual package as to-be-removed too.
        for virtual_pkg in virtual_packages.keys():
            if virtual_packages[virtual_pkg] == 0:
                removals.append(virtual_pkg)

        # Check binary dependencies (Depends)
        for package in deps.keys():
            if package in removals:
                continue
            parsed_dep = []
            try:
                parsed_dep += apt_pkg.parse_depends(deps[package])
            except ValueError as e:
                print "Error for package %s: %s" % (package, e)
            for dep in parsed_dep:
                # Check for partial breakage.  If a package has an ORed
                # dependency, there is only a dependency problem if all
                # packages in the ORed depends will be removed.
                unsat = 0
                for dep_package, _, _ in dep:
                    if dep_package in removals:
                        unsat += 1
                if unsat == len(dep):
                    component = p2c[package]
                    source = sources[package]
                    if component != "main":
                        source = "%s/%s" % (source, component)
                    all_broken.setdefault(source, {}).setdefault(package, set()).add(architecture)
                    dep_problem = 1

    if all_broken:
        if cruft:
            print "  - broken Depends:"
        else:
            print "# Broken Depends:"
        for source, bindict in sorted(all_broken.items()):
            lines = []
            for binary, arches in sorted(bindict.items()):
                if arches == all_arches or 'all' in arches:
                    lines.append(binary)
                else:
                    lines.append('%s [%s]' % (binary, ' '.join(sorted(arches))))
            if cruft:
                print '    %s: %s' % (source, lines[0])
            else:
                print '%s: %s' % (source, lines[0])
            for line in lines[1:]:
                if cruft:
                    print '    ' + ' ' * (len(source) + 2) + line
                else:
                    print ' ' * (len(source) + 2) + line
        if not cruft:
            print

    # Check source dependencies (Build-Depends and Build-Depends-Indep)
    all_broken = {}
    metakey_bd = get_or_set_metadatakey("Build-Depends", session)
    metakey_bdi = get_or_set_metadatakey("Build-Depends-Indep", session)
    params = {
        'suite_id':    dbsuite.suite_id,
        'metakey_ids': (metakey_bd.key_id, metakey_bdi.key_id),
    }
    statement = '''
        SELECT s.id, s.source, string_agg(sm.value, ', ') as build_dep
           FROM source s
           JOIN source_metadata sm ON s.id = sm.src_id
           WHERE s.id in
               (SELECT source FROM src_associations
                   WHERE suite = :suite_id)
               AND sm.key_id in :metakey_ids
           GROUP BY s.id, s.source'''
    query = session.query('id', 'source', 'build_dep').from_statement(statement). \
        params(params)
    for source_id, source, build_dep in query:
        if source in removals:
            continue
        parsed_dep = []
        if build_dep is not None:
            # Remove [arch] information since we want to see breakage on all arches
            build_dep = re_build_dep_arch.sub("", build_dep)
            try:
                parsed_dep += apt_pkg.parse_depends(build_dep)
            except ValueError as e:
                print "Error for source %s: %s" % (source, e)
        for dep in parsed_dep:
            unsat = 0
            for dep_package, _, _ in dep:
                if dep_package in removals:
                    unsat += 1
            if unsat == len(dep):
                component, = session.query(Component.component_name) \
                    .join(Component.overrides) \
                    .filter(Override.suite == overridesuite) \
                    .filter(Override.package == re.sub('/(contrib|non-free)$', '', source)) \
                    .join(Override.overridetype).filter(OverrideType.overridetype == 'dsc') \
                    .first()
                key = source
                if component != "main":
                    key = "%s/%s" % (source, component)
                all_broken.setdefault(key, set()).add(pp_deps(dep))
                dep_problem = 1

    if all_broken:
        if cruft:
            print "  - broken Build-Depends:"
        else:
            print "# Broken Build-Depends:"
        for source, bdeps in sorted(all_broken.items()):
            bdeps = sorted(bdeps)
            if cruft:
                print '    %s: %s' % (source, bdeps[0])
            else:
                print '%s: %s' % (source, bdeps[0])
            for bdep in bdeps[1:]:
                if cruft:
                    print '    ' + ' ' * (len(source) + 2) + bdep
                else:
                    print ' ' * (len(source) + 2) + bdep