2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
43 import email as modemail
48 import daklib.config as config
49 import daklib.daksubprocess
50 from dbconn import DBConn, get_architecture, get_component, get_suite, \
51 get_override_type, Keyring, session_wrapper, \
52 get_active_keyring_paths, \
53 get_suite_architectures, get_or_set_metadatakey, DBSource, \
54 Component, Override, OverrideType
55 from sqlalchemy import desc
56 from dak_exceptions import *
57 from gpg import SignedFile
58 from textutils import fix_maintainer
59 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
60 re_multi_line_field, re_srchasver, re_taint_free, \
61 re_re_mark, re_whitespace_comment, re_issource, \
62 re_build_dep_arch, re_parse_maintainer
64 from formats import parse_format, validate_changes_format
65 from srcformats import get_format_from_string
66 from collections import defaultdict
68 ################################################################################
70 default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
72 alias_cache = None #: Cache for email alias checks
73 key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
75 # (hashname, function, earliest_changes_version)
76 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
77 ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
# Monkeypatch commands.getstatusoutput as it may not return the correct exit
# code in lenny's Python. This also affects commands.getoutput and
# commands.getstatus.
def dak_getstatusoutput(cmd):
    """Drop-in replacement for commands.getstatusoutput.

    Runs C{cmd} through the shell with stderr merged into stdout and
    returns a pair (exit status, output) with any single trailing
    newline removed from the output, mirroring the stdlib contract.
    """
    pipe = daklib.daksubprocess.Popen(cmd, shell=True, universal_newlines=True,
                                      stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

    # Drain the pipe before waiting so a full pipe buffer cannot
    # deadlock the child process.
    output = pipe.stdout.read()

    ret = pipe.wait()

    if output[-1:] == '\n':
        output = output[:-1]

    # Popen.wait() should always give an int once the child has exited;
    # fall back to 0 defensively, matching the historic behaviour.
    if ret is None:
        ret = 0

    return ret, output

commands.getstatusoutput = dak_getstatusoutput
100 ################################################################################
def html_escape(s):
    """ Escape html chars """
    # re_html_escaping matches the characters needing escaping; the
    # html_escaping dict maps each one to its entity replacement.
    return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
106 ################################################################################
def open_file(filename, mode='r'):
    """
    Open C{filename}, return fileobject.

    @type filename: string
    @param filename: path/filename to open

    @type mode: string
    @param mode: open mode

    @rtype: fileobject
    @return: open fileobject

    @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
    """
    try:
        f = open(filename, mode)
    except IOError:
        # Re-raise as the project-specific exception so callers can
        # catch a single error type for "could not open".
        raise CantOpenError(filename)
    return f
130 ################################################################################
def our_raw_input(prompt=""):
    """Write C{prompt} to stdout and read one line of user input;
    on EOF (^D) report a user interrupt on stderr and exit."""
    if prompt:
        sys.stdout.write(prompt)
    sys.stdout.flush()
    try:
        ret = raw_input()
        return ret
    except EOFError:
        sys.stderr.write("\nUser interrupt (^D).\n")
        raise SystemExit
148 ################################################################################
150 def extract_component_from_section(section, session=None):
# NOTE(review): this extract is garbled -- stale original line numbers are
# fused into each line and interior lines are missing; restore the full
# body from VCS before changing it.
# Splits an override section like "contrib/net" into (section, component).
# Presumably expands a default component (via the database, given the
# session parameter) when none is embedded -- TODO confirm.
153     if section.find('/') != -1:
154         component = section.split('/')[0]
156     # Expand default component
160     return (section, component)
162 ################################################################################
164 def parse_deb822(armored_contents, signing_rules=0, keyrings=None, session=None):
# NOTE(review): garbled extract -- stale line numbers fused into lines,
# interior lines missing.  Do not edit without the full source.
# Verifies the (optional) signature, then parses a deb822 paragraph into a
# dict of lower-cased field names; multi-line fields are accumulated with
# embedded newlines and the raw input is kept under "filecontents".
165     require_signature = True
168         require_signature = False
170     signed_file = SignedFile(armored_contents, keyrings=keyrings, require_signature=require_signature)
171     contents = signed_file.contents
176     # Split the lines in the input, keeping the linebreaks.
177     lines = contents.splitlines(True)
180         raise ParseChangesError("[Empty changes file]")
182     # Reindex by line number so we can easily verify the format of
188         indexed_lines[index] = line[:-1]
190     num_of_lines = len(indexed_lines.keys())
193     while index < num_of_lines:
195         line = indexed_lines[index]
# In strict (.dsc, signing_rules == 1) mode a blank line is only legal at
# the very end of the data section.
196         if line == "" and signing_rules == 1:
197             if index != num_of_lines:
198                 raise InvalidDscError(index)
200         slf = re_single_line_field.match(line)
202             field = slf.groups()[0].lower()
203             changes[field] = slf.groups()[1]
207                 changes[field] += '\n'
209         mlf = re_multi_line_field.match(line)
# A continuation line with no preceding field is a format error.
212                 raise ParseChangesError("'%s'\n [Multi-line field continuing on from nothing?]" % (line))
213             if first == 1 and changes[field] != "":
214                 changes[field] += '\n'
216             changes[field] += mlf.groups()[0] + '\n'
220     changes["filecontents"] = armored_contents
222     if changes.has_key("source"):
223         # Strip the source version in brackets from the source field,
224         # put it in the "source-version" field instead.
225         srcver = re_srchasver.search(changes["source"])
227             changes["source"] = srcver.group(1)
228             changes["source-version"] = srcver.group(2)
231         raise ParseChangesError(error)
235 ################################################################################
237 def parse_changes(filename, signing_rules=0, dsc_file=0, keyrings=None):
# NOTE(review): garbled extract -- stale line numbers fused into lines,
# some interior lines missing (e.g. the try/except around the utf-8 check
# and the final return).  Recover from VCS before editing.
239     Parses a changes file and returns a dictionary where each field is a
240     key. The mandatory first argument is the filename of the .changes
243     signing_rules is an optional argument:
245     - If signing_rules == -1, no signature is required.
246     - If signing_rules == 0 (the default), a signature is required.
247     - If signing_rules == 1, it turns on the same strict format checking
250     The rules for (signing_rules == 1)-mode are:
252     - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
253       followed by any PGP header data and must end with a blank line.
255     - The data section must end with a blank line and must be followed by
256       "-----BEGIN PGP SIGNATURE-----".
259     with open_file(filename) as changes_in:
260         content = changes_in.read()
# Validate the whole file is proper UTF-8 before parsing.
262         unicode(content, 'utf-8')
264         raise ChangesUnicodeError("Changes file not proper utf-8")
265     changes = parse_deb822(content, signing_rules, keyrings=keyrings)
269     # Finally ensure that everything needed for .changes is there
270     must_keywords = ('Format', 'Date', 'Source', 'Binary', 'Architecture', 'Version',
271                      'Distribution', 'Maintainer', 'Description', 'Changes', 'Files')
274     for keyword in must_keywords:
275         if not changes.has_key(keyword.lower()):
276             missingfields.append(keyword)
278             if len(missingfields):
279                 raise ParseChangesError("Missing mandatory field(s) in changes file (policy 5.5): %s" % (missingfields))
283 ################################################################################
def hash_key(hashname):
    """Return the key used for a hash in the files dict, e.g. 'md5' -> 'md5sum'."""
    return "{0}sum".format(hashname)
288 ################################################################################
290 def check_dsc_files(dsc_filename, dsc, dsc_files):
# NOTE(review): garbled extract -- stale line numbers fused into lines and
# interior lines missing (the ftype_lookup assignment, loop heads, the
# rejmsg init and the final return).  Recover from VCS before editing.
292     Verify that the files listed in the Files field of the .dsc are
293     those expected given the announced Format.
295     @type dsc_filename: string
296     @param dsc_filename: path of .dsc file
299     @param dsc: the content of the .dsc parsed by C{parse_changes()}
301     @type dsc_files: dict
302     @param dsc_files: the file list returned by C{build_file_list()}
305     @return: all errors detected
309     # Ensure .dsc lists proper set of source files according to the format
311     has = defaultdict(lambda: 0)
# Ordered (regex, counter-keys) lookup table: the first matching suffix
# wins, so more specific patterns (e.g. .asc signatures) come first.
314         (r'orig\.tar\.gz', ('orig_tar_gz', 'orig_tar')),
315         (r'diff\.gz', ('debian_diff',)),
316         (r'tar\.gz', ('native_tar_gz', 'native_tar')),
317         (r'debian\.tar\.(gz|bz2|xz)', ('debian_tar',)),
318         (r'orig\.tar\.(gz|bz2|xz)\.asc', ('orig_tar_sig',)),
319         (r'orig\.tar\.(gz|bz2|xz)', ('orig_tar',)),
320         (r'tar\.(gz|bz2|xz)', ('native_tar',)),
321         (r'orig-.+\.tar\.(gz|bz2|xz)\.asc', ('more_orig_tar_sig',)),
322         (r'orig-.+\.tar\.(gz|bz2|xz)', ('more_orig_tar',)),
326         m = re_issource.match(f)
328             rejmsg.append("%s: %s in Files field not recognised as source."
332         # Populate 'has' dictionary by resolving keys in lookup table
334         for regex, keys in ftype_lookup:
335             if re.match(regex, m.group(3)):
341             # File does not match anything in lookup table; reject
343             rejmsg.append("%s: unexpected source file '%s'" % (dsc_filename, f))
346     # Check for multiple files
347     for file_type in ('orig_tar', 'orig_tar_sig', 'native_tar', 'debian_tar', 'debian_diff'):
348         if has[file_type] > 1:
349             rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))
351     # Source format specific tests
353         format = get_format_from_string(dsc['format'])
355             '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)
358     except UnknownFormatError:
359         # Not an error here for now
364 ################################################################################
366 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
368 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
# NOTE(review): garbled extract -- stale line numbers fused into lines and
# interior lines missing (the files dict init, the split/len branching and
# the final return).  Recover from VCS before editing.
# Parses a Files-style field from a parsed .changes/.dsc into a dict keyed
# by filename; .changes entries carry section/priority, .dsc entries do not.
371     # Make sure we have a Files: field to parse...
372     if not changes.has_key(field):
373         raise NoFilesFieldError
375     # Validate .changes Format: field
377     validate_changes_format(parse_format(changes['format']), field)
379     includes_section = (not is_a_dsc) and field == "files"
381     # Parse each entry/line:
382     for i in changes[field].split('\n'):
386         section = priority = ""
# 5-token lines (hash size section priority name) come from .changes;
# 3-token lines (hash size name) come from .dsc.
389             (md5, size, section, priority, name) = s
391             (md5, size, name) = s
393             raise ParseChangesError(i)
400             (section, component) = extract_component_from_section(section)
402         files[name] = dict(size=size, section=section,
403                            priority=priority, component=component)
404         files[name][hashname] = md5
408 ################################################################################
410 def send_mail (message, filename="", whitelists=None):
# NOTE(review): garbled extract -- stale line numbers fused into lines and
# many interior lines missing (loop heads, branch conditions, temp-file
# bookkeeping).  Recover the full body from VCS before editing.
411     """sendmail wrapper, takes _either_ a message string or a file as arguments
413     @type whitelists: list of (str or None)
414     @param whitelists: path to whitelists. C{None} or an empty list whitelists
415                        everything, otherwise an address is whitelisted if it is
416                        included in any of the lists.
417                        In addition a global whitelist can be specified in
418                        Dinstall::MailWhiteList.
# If Dir::Mail is configured, archive a copy of the outgoing mail first.
421     maildir = Cnf.get('Dir::Mail')
423         path = os.path.join(maildir, datetime.datetime.now().isoformat())
424         path = find_next_free(path)
425         with open(path, 'w') as fh:
428     # Check whether we're supposed to be sending mail
429     if Cnf.has_key("Dinstall::Options::No-Mail") and Cnf["Dinstall::Options::No-Mail"]:
432     # If we've been passed a string dump it into a temporary file
434         (fd, filename) = tempfile.mkstemp()
435         os.write (fd, message)
# Build the whitelist: caller-supplied lists plus the optional global one.
438     if whitelists is None or None in whitelists:
440     if Cnf.get('Dinstall::MailWhiteList', ''):
441         whitelists.append(Cnf['Dinstall::MailWhiteList'])
442     if len(whitelists) != 0:
443         with open_file(filename) as message_in:
444             message_raw = modemail.message_from_file(message_in)
447         for path in whitelists:
448             with open_file(path, 'r') as whitelist_in:
449                 for line in whitelist_in:
450                     if not re_whitespace_comment.match(line):
451                         if re_re_mark.match(line):
452                             whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
454                             whitelist.append(re.compile(re.escape(line.strip())))
# Filter each recipient header against the whitelist patterns.
457         fields = ["To", "Bcc", "Cc"]
460             value = message_raw.get(field, None)
463                 for item in value.split(","):
464                     (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
470                     if not mail_whitelisted:
471                         print "Skipping {0} since it's not whitelisted".format(item)
475                     # Doesn't have any mail in whitelist so remove the header
477                     del message_raw[field]
479                     message_raw.replace_header(field, ', '.join(match))
481         # Change message fields in order if we don't have a To header
482         if not message_raw.has_key("To"):
485                 if message_raw.has_key(field):
486                     message_raw[fields[-1]] = message_raw[field]
487                     del message_raw[field]
490                 # Clean up any temporary files
491                 # and return, as we removed all recipients.
493                     os.unlink (filename);
# Rewrite the (possibly filtered) message back to the temp file.
496         fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0o700);
497         os.write (fd, message_raw.as_string(True));
# Invoke sendmail via the (monkeypatched) commands.getstatusoutput.
501     (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
503         raise SendmailFailedError(output)
505     # Clean up any temporary files
509 ################################################################################
def poolify(source, component=None):
    """Return the pool directory fragment for a source package name:
    'libfoo' -> 'libf/libfoo/', 'bar' -> 'b/bar/'.

    Library packages are bucketed by their first four characters
    (lib + one letter), everything else by the first character alone.
    """
    width = 4 if source.startswith("lib") else 1
    return "%s/%s/" % (source[:width], source)
517 ################################################################################
519 def move (src, dest, overwrite = 0, perms = 0o664):
# NOTE(review): garbled extract -- stale line numbers fused into lines and
# interior lines missing (the dest-is-dir branch, the umask restore and,
# presumably, the final os.unlink(src) that makes this a move -- TODO
# confirm against the full source).  Copies src to dest with the given
# perms, creating the destination directory (setgid, 02775) if needed, and
# fubar()s rather than overwrite unless 'overwrite' is set.
520     if os.path.exists(dest) and os.path.isdir(dest):
523         dest_dir = os.path.dirname(dest)
524     if not os.path.lexists(dest_dir):
525         umask = os.umask(00000)
526         os.makedirs(dest_dir, 0o2775)
528     #print "Moving %s to %s..." % (src, dest)
529     if os.path.exists(dest) and os.path.isdir(dest):
530         dest += '/' + os.path.basename(src)
531     # Don't overwrite unless forced to
532     if os.path.lexists(dest):
534             fubar("Can't move %s to %s - file already exists." % (src, dest))
536             if not os.access(dest, os.W_OK):
537                 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
538     shutil.copy2(src, dest)
539     os.chmod(dest, perms)
542 def copy (src, dest, overwrite = 0, perms = 0o664):
# NOTE(review): garbled extract -- stale line numbers fused into lines and
# interior lines missing (dest-is-dir branch, umask restore, overwrite
# guard).  Same shape as move() above but raises FileExistsError /
# CantOverwriteError instead of calling fubar(), and keeps src in place.
543     if os.path.exists(dest) and os.path.isdir(dest):
546         dest_dir = os.path.dirname(dest)
547     if not os.path.exists(dest_dir):
548         umask = os.umask(00000)
549         os.makedirs(dest_dir, 0o2775)
551     #print "Copying %s to %s..." % (src, dest)
552     if os.path.exists(dest) and os.path.isdir(dest):
553         dest += '/' + os.path.basename(src)
554     # Don't overwrite unless forced to
555     if os.path.lexists(dest):
557             raise FileExistsError
559             if not os.access(dest, os.W_OK):
560                 raise CantOverwriteError
561     shutil.copy2(src, dest)
562     os.chmod(dest, perms)
564 ################################################################################
def which_conf_file():
    """Return the dak config file to use.

    Order of precedence: the DAK_CONFIG environment variable, then a
    per-user config (if the host config allows it), then a host-specific
    config from Config::<fqdn>::DakConfig, else the built-in default.
    """
    if os.getenv('DAK_CONFIG'):
        return os.getenv('DAK_CONFIG')

    res = socket.getfqdn()
    # In case we allow local config files per user, try if one exists
    if Cnf.find_b("Config::" + res + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # Bug fix: the second argument used to be the absolute path
        # "/etc/dak.conf", which makes os.path.join discard homedir
        # entirely; the per-user config was clearly meant to live under
        # $HOME/etc/dak.conf.
        confpath = os.path.join(homedir, "etc/dak.conf")
        if os.path.exists(confpath):
            apt_pkg.read_config_file_isc(Cnf, confpath)

    # We are still in here, so there is no local config file or we do
    # not allow local files. Do the normal stuff.
    if Cnf.get("Config::" + res + "::DakConfig"):
        return Cnf["Config::" + res + "::DakConfig"]

    return default_config
585 ################################################################################
def TemplateSubst(subst_map, filename):
    """Perform a substitution of template.

    Reads the template at C{filename} and replaces every occurrence of
    each key of C{subst_map} with str() of the corresponding value.
    Returns the substituted text (the return was lost in this extract
    and has been restored).
    """
    with open_file(filename) as templatefile:
        template = templatefile.read()
    # .items() instead of the py2-only .iteritems(): identical behaviour,
    # works on both Python 2 and 3.
    for k, v in subst_map.items():
        template = template.replace(k, str(v))
    return template
595 ################################################################################
def fubar(msg, exit_code=1):
    """Print an error message to stderr and terminate with exit_code."""
    sys.stderr.write("E: %s\n" % (msg))
    sys.exit(exit_code)

def warn(msg):
    """Print a warning to stderr; non-fatal counterpart of fubar()."""
    sys.stderr.write("W: %s\n" % (msg))
604 ################################################################################
# Returns the user name with a laughable attempt at rfc822 conformancy
# (read: removing stray periods).
def whoami():
    """Return the current user's real name from the GECOS field, trimmed
    at the first comma, with periods stripped."""
    return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')

def getusername():
    """Return the current user's login name."""
    return pwd.getpwuid(os.getuid())[0]
614 ################################################################################
624 return ("%d%s" % (c, t))
626 ################################################################################
def find_next_free(dest, too_many=100):
    """Return C{dest} if it does not exist yet, otherwise the first free
    name of the form dest.N (N counting up from 0).

    @raise NoFreeFilenameError: if no free name is found within
           C{too_many} attempts.
    """
    extra = 0
    orig_dest = dest
    while os.path.lexists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
        extra += 1
    if extra >= too_many:
        raise NoFreeFilenameError
    return dest
638 ################################################################################
def result_join(original, sep='\t'):
    """Join a sequence into one sep-separated string, rendering None
    entries as the empty string (used for tab-separated report output)."""
    # Replaces a manual index loop over the py2-only xrange(); a generator
    # expression is clearer and works on both Python 2 and 3.
    return sep.join("" if entry is None else entry for entry in original)
649 ################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    """Prefix every (stripped) line of C{str} with C{prefix}.

    Blank lines are dropped unless C{include_blank_lines} is set.  The
    result carries no trailing newline.
    """
    # Collect into a list and join once instead of repeated string
    # concatenation; joining also removes the need to strip a trailing
    # newline afterwards.
    kept = []
    for raw in str.split('\n'):
        line = raw.strip()
        if line or include_blank_lines:
            kept.append("%s%s" % (prefix, line))
    return "\n".join(kept)
662 ################################################################################
def join_with_commas_and(list):
    """Render a list as English prose: '' -> 'nothing', ['a'] -> 'a',
    ['a', 'b', 'c'] -> 'a, b and c'."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    return "%s and %s" % (", ".join(list[:-1]), list[-1])
669 ################################################################################
674 (pkg, version, constraint) = atom
676 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
679 pp_deps.append(pp_dep)
680 return " |".join(pp_deps)
682 ################################################################################
687 ################################################################################
689 def parse_args(Options):
# NOTE(review): garbled extract -- stale line numbers fused into lines and
# interior lines missing (list inits, else branches, the check_source
# handling for the "source" pseudo-architecture).  Recover from VCS
# before editing.
690     """ Handle -a, -c and -s arguments; returns them as SQL constraints """
691     # XXX: This should go away and everything which calls it be converted
692     # to use SQLA properly. For now, we'll just fix it not to use
693     # the old Pg interface though
694     session = DBConn().session()
# Process suite: build "AND su.id IN (...)" from recognised suite names.
698         for suitename in split_args(Options["Suite"]):
699             suite = get_suite(suitename, session=session)
700             if not suite or suite.suite_id is None:
701                 warn("suite '%s' not recognised." % (suite and suite.suite_name or suitename))
703                 suite_ids_list.append(suite.suite_id)
705             con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
707             fubar("No valid suite given.")
# Process component: same pattern for "AND c.id IN (...)".
712     if Options["Component"]:
713         component_ids_list = []
714         for componentname in split_args(Options["Component"]):
715             component = get_component(componentname, session=session)
716             if component is None:
717                 warn("component '%s' not recognised." % (componentname))
719                 component_ids_list.append(component.component_id)
720         if component_ids_list:
721             con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
723             fubar("No valid component given.")
727     # Process architecture
728     con_architectures = ""
730     if Options["Architecture"]:
732         for archname in split_args(Options["Architecture"]):
# "source" is handled specially (sets check_source rather than an arch id)
# -- TODO confirm against the full source.
733             if archname == "source":
736                 arch = get_architecture(archname, session=session)
738                     warn("architecture '%s' not recognised." % (archname))
740                     arch_ids_list.append(arch.arch_id)
742             con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
745                 fubar("No valid architecture given.")
749     return (con_suites, con_architectures, con_components, check_source)
751 ################################################################################
def arch_compare_sw(a, b):
    """
    Function for use in sorting lists of architectures.

    Sorts normally except that 'source' dominates all others.
    """
    if a == "source" and b == "source":
        return 0
    elif a == "source":
        return -1
    elif b == "source":
        return 1
    # Plain lexicographic comparison for everything else (py2 cmp()).
    return cmp(a, b)
769 ################################################################################
def split_args(s, dwim=True):
    """
    Split command line arguments which can be separated by either commas
    or whitespace. If dwim is set, it will complain about string ending
    in comma since this usually means someone did 'dak ls -a i386, m68k
    foo' or something and the inevitable confusion resulting from 'm68k'
    being treated as an argument is undesirable.
    """
    if s.find(",") == -1:
        return s.split()
    if s[-1:] == "," and dwim:
        fubar("split_args: found trailing comma, spurious space maybe?")
    return s.split(",")
787 ################################################################################
def gpg_keyring_args(keyrings=None):
    """Return a '--keyring PATH' argument string for gpg.

    When no keyrings are supplied, defaults to the active keyring paths
    from the database.
    """
    if not keyrings:
        keyrings = get_active_keyring_paths()
    return " ".join(["--keyring %s" % x for x in keyrings])
795 ################################################################################
797 def gpg_get_key_addresses(fingerprint):
# NOTE(review): garbled extract -- stale line numbers fused into lines and
# interior lines missing (the cache hit return, the try:, the parts split,
# address extraction and the final return).  Recover from VCS before
# editing.
798     """retrieve email addresses from gpg key uids for a given fingerprint"""
# Results are memoized per fingerprint in key_uid_email_cache.
799     addresses = key_uid_email_cache.get(fingerprint)
800     if addresses != None:
804         with open(os.devnull, "wb") as devnull:
805             output = daklib.daksubprocess.check_output(
806                 ["gpg", "--no-default-keyring"] + gpg_keyring_args().split() +
807                 ["--with-colons", "--list-keys", fingerprint], stderr=devnull)
808     except subprocess.CalledProcessError:
# Parse gpg's colon-delimited listing; only uid/pub records carry uids.
811     for l in output.split('\n'):
813         if parts[0] not in ("uid", "pub"):
820             # Do not use unicode_escape, because it is locale-specific
821             uid = codecs.decode(uid, "string_escape").decode("utf-8")
822         except UnicodeDecodeError:
823             uid = uid.decode("latin1") # does not fail
824         m = re_parse_maintainer.match(uid)
828         address = address.encode("utf8") # dak still uses bytes
829         if address.endswith('@debian.org'):
830             # prefer @debian.org addresses
831             # TODO: maybe not hardcode the domain
832             addresses.insert(0, address)
834             addresses.append(address)
835     key_uid_email_cache[fingerprint] = addresses
838 ################################################################################
840 def get_logins_from_ldap(fingerprint='*'):
# NOTE(review): garbled extract -- interior lines missing (the ldap import,
# the login dict init, the loop over Attrs and the return).  Recover from
# VCS before editing.  Returns a dict mapping key fingerprint -> login,
# queried from the Import-LDAP-Fingerprints-configured server.
841     """retrieve login from LDAP linked to a given fingerprint"""
843     LDAPDn = Cnf['Import-LDAP-Fingerprints::LDAPDn']
844     LDAPServer = Cnf['Import-LDAP-Fingerprints::LDAPServer']
# Anonymous bind is sufficient for this read-only lookup.
845     l = ldap.open(LDAPServer)
846     l.simple_bind_s('','')
847     Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
848                        '(keyfingerprint=%s)' % fingerprint,
849                        ['uid', 'keyfingerprint'])
852         login[elem[1]['keyFingerPrint'][0]] = elem[1]['uid'][0]
855 ################################################################################
857 def get_users_from_ldap():
# NOTE(review): garbled extract -- interior lines missing (the ldap import,
# the users dict init, loop heads and the return).  Recover from VCS
# before editing.  Returns a dict mapping "full name" -> login for every
# uid in the directory.
858     """retrieve login and user names from LDAP"""
860     LDAPDn = Cnf['Import-LDAP-Fingerprints::LDAPDn']
861     LDAPServer = Cnf['Import-LDAP-Fingerprints::LDAPServer']
862     l = ldap.open(LDAPServer)
863     l.simple_bind_s('','')
864     Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
865                        '(uid=*)', ['uid', 'cn', 'mn', 'sn'])
# Assemble the display name from cn/mn/sn, skipping placeholder "-".
870         for k in ('cn', 'mn', 'sn'):
872             if elem[k][0] != '-':
873                 name.append(elem[k][0])
876         users[' '.join(name)] = elem['uid'][0]
879 ################################################################################
def clean_symlink(src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
    Returns fixed 'src'.
    """
    src = src.replace(root, '', 1)
    dest = dest.replace(root, '', 1)
    dest = os.path.dirname(dest)
    # One '../' per directory level of dest climbs back to the root,
    # from where the stripped src path applies.
    new_src = '../' * len(dest.split('/'))
    return "%s%s" % (new_src, src)
892 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix="", mode=None, group=None):
    """
    Return a secure and unique filename by pre-creating it.

    @type directory: string
    @param directory: If non-null it will be the directory the file is pre-created in.

    @type prefix: string
    @param prefix: The filename will be prefixed with this string

    @type suffix: string
    @param suffix: The filename will end with this string

    @type mode: string
    @param mode: If set the file will get chmodded to those permissions

    @type group: string
    @param group: If set the file will get chgrped to the specified group.

    @rtype: list
    @return: Returns a pair (fd, name)
    """
    (tfd, tfname) = tempfile.mkstemp(suffix, prefix, directory)
    if mode:
        os.chmod(tfname, mode)
    if group:
        gid = grp.getgrnam(group).gr_gid
        os.chown(tfname, -1, gid)
    return (tfd, tfname)
925 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix="", mode=None, group=None):
    """
    Return a secure and unique directory by pre-creating it.

    @type parent: string
    @param parent: If non-null it will be the directory the directory is pre-created in.

    @type prefix: string
    @param prefix: The filename will be prefixed with this string

    @type suffix: string
    @param suffix: The filename will end with this string

    @type mode: string
    @param mode: If set the file will get chmodded to those permissions

    @type group: string
    @param group: If set the file will get chgrped to the specified group.

    @rtype: string
    @return: Returns the name of the pre-created directory.  (The original
             docstring claimed a (fd, name) pair, but mkdtemp only yields
             a path.)
    """
    tfname = tempfile.mkdtemp(suffix, prefix, parent)
    if mode:
        os.chmod(tfname, mode)
    if group:
        gid = grp.getgrnam(group).gr_gid
        os.chown(tfname, -1, gid)
    return tfname
959 ################################################################################
def is_email_alias(email):
    """ checks if the user part of the email is listed in the alias file """
    global alias_cache
    # Lazily build the module-level cache on first use.
    if alias_cache is None:
        aliasfn = which_alias_file()
        alias_cache = set()
        if aliasfn:
            # Close the alias file deterministically instead of leaking
            # the handle until GC.
            with open(aliasfn) as aliasfile:
                for l in aliasfile:
                    alias_cache.add(l.split(':')[0])
    uid = email.split('@')[0]
    return uid in alias_cache
973 ################################################################################
def get_changes_files(from_dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place)

    Returns a list of filenames
    """
    try:
        # Much of the rest of p-u/p-a depends on being in the right place
        os.chdir(from_dir)
        changes_files = [x for x in os.listdir(from_dir) if x.endswith('.changes')]
    except OSError as e:
        fubar("Failed to read list from directory %s (%s)" % (from_dir, e))

    return changes_files
992 ################################################################################
994 Cnf = config.Config().Cnf
996 ################################################################################
998 def parse_wnpp_bug_file(file = "/srv/ftp-master.debian.org/scripts/masterfiles/wnpp_rm"):
# NOTE(review): garbled extract -- stale line numbers fused into lines and
# interior lines missing (the try/open, the wnpp dict init, loop heads and
# the return).  Recover from VCS before editing.
1000     Parses the wnpp bug list available at https://qa.debian.org/data/bts/wnpp_rm
1001     Well, actually it parsed a local copy, but let's document the source
1004     returns a dict associating source package name with a list of open wnpp
1005     bugs (Yes, there might be more than one)
1011         lines = f.readlines()
1012     except IOError as e:
# Missing file is non-fatal: warn and carry on with no known bugs.
1013         print "Warning: Couldn't open %s; don't know about WNPP bugs, so won't close any." % file
# Each line is "srcpkg: bug|bug|...".
1018         splited_line = line.split(": ", 1)
1019         if len(splited_line) > 1:
1020             wnpp[splited_line[0]] = splited_line[1].split("|")
1022     for source in wnpp.keys():
1024         for wnpp_bug in wnpp[source]:
1025             bug_no = re.search("(\d)+", wnpp_bug).group()
1031 ################################################################################
1033 def get_packages_from_ftp(root, suite, component, architecture):
# NOTE(review): garbled extract -- stale line numbers fused into lines and
# interior lines missing (result checks, the return of Packages).
# Recover from VCS before editing.
1035     Returns an object containing apt_pkg-parseable data collected by
1036     aggregating Packages.gz files gathered for each architecture.
1039     @param root: path to ftp archive root directory
1042     @param suite: suite to extract files from
1044     @type component: string
1045     @param component: component to extract files from
1047     @type architecture: string
1048     @param architecture: architecture to extract files from
1051     @return: apt_pkg class containing package data
# Decompress the main Packages.gz into a temp file via the shell...
1053     filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (root, suite, component, architecture)
1054     (fd, temp_file) = temp_filename()
1055     (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_file))
1057         fubar("Gunzip invocation failed!\n%s\n" % (output), result)
# ...then append the debian-installer variant when present.
1058     filename = "%s/dists/%s/%s/debian-installer/binary-%s/Packages.gz" % (root, suite, component, architecture)
1059     if os.path.exists(filename):
1060         (result, output) = commands.getstatusoutput("gunzip -c %s >> %s" % (filename, temp_file))
1062             fubar("Gunzip invocation failed!\n%s\n" % (output), result)
1063     packages = open_file(temp_file)
1064     Packages = apt_pkg.TagFile(packages)
1065     os.unlink(temp_file)
1068 ################################################################################
def deb_extract_control(fh):
    """extract DEBIAN/control from a binary package"""
    deb = apt_inst.DebFile(fh)
    return deb.control.extractdata("control")
1074 ################################################################################
def mail_addresses_for_upload(maintainer, changed_by, fingerprint):
    """mail addresses to contact for an upload

    @type maintainer: str
    @param maintainer: Maintainer field of the .changes file

    @type changed_by: str
    @param changed_by: Changed-By field of the .changes file

    @type fingerprint: str
    @param fingerprint: fingerprint of the key used to sign the upload

    @rtype: list of str
    @return: list of RFC 2047-encoded mail addresses to contact regarding
             this upload
    """
    recipients = [maintainer]
    if changed_by != maintainer:
        recipients.append(changed_by)

    # If the signing key's uids match neither the maintainer nor the
    # uploader, also notify the key owner's preferred address.
    key_addresses = gpg_get_key_addresses(fingerprint)
    if key_addresses:
        signer_is_known = (fix_maintainer(changed_by)[3] in key_addresses
                           or fix_maintainer(maintainer)[3] in key_addresses)
        if not signer_is_known:
            recipients.append(key_addresses[0])

    return [fix_maintainer(entry)[1] for entry in recipients]
1103 ################################################################################
def call_editor(text="", suffix=".txt"):
    """run editor and return the result as a string

    @type text: str
    @param text: initial text

    @type suffix: str
    @param suffix: extension for temporary file

    @rtype: str
    @return: string with the edited text
    """
    # $VISUAL wins over $EDITOR, falling back to vi.
    editor = os.environ.get('VISUAL', os.environ.get('EDITOR', 'vi'))
    tmp = tempfile.NamedTemporaryFile(suffix=suffix, delete=False)
    try:
        tmp.write(text)
        tmp.close()
        daklib.daksubprocess.check_call([editor, tmp.name])
        return open(tmp.name, 'r').read()
    finally:
        # Always remove the scratch file, even if the editor fails.
        os.unlink(tmp.name)
1127 ################################################################################
1129 def check_reverse_depends(removals, suite, arches=None, session=None, cruft=False, quiet=False, include_arch_all=True):
1130 dbsuite = get_suite(suite, session)
1131 overridesuite = dbsuite
1132 if dbsuite.overridesuite is not None:
1133 overridesuite = get_suite(dbsuite.overridesuite, session)
1136 all_broken = defaultdict(lambda: defaultdict(set))
1138 all_arches = set(arches)
1140 all_arches = set(x.arch_string for x in get_suite_architectures(suite))
1141 all_arches -= set(["source", "all"])
1142 removal_set = set(removals)
1143 metakey_d = get_or_set_metadatakey("Depends", session)
1144 metakey_p = get_or_set_metadatakey("Provides", session)
1146 'suite_id': dbsuite.suite_id,
1147 'metakey_d_id': metakey_d.key_id,
1148 'metakey_p_id': metakey_p.key_id,
1150 if include_arch_all:
1151 rdep_architectures = all_arches | set(['all'])
1153 rdep_architectures = all_arches
1154 for architecture in rdep_architectures:
1157 virtual_packages = {}
1158 params['arch_id'] = get_architecture(architecture, session).arch_id
1161 SELECT b.package, s.source, c.name as component,
1162 (SELECT bmd.value FROM binaries_metadata bmd WHERE bmd.bin_id = b.id AND bmd.key_id = :metakey_d_id) AS depends,
1163 (SELECT bmp.value FROM binaries_metadata bmp WHERE bmp.bin_id = b.id AND bmp.key_id = :metakey_p_id) AS provides
1165 JOIN bin_associations ba ON b.id = ba.bin AND ba.suite = :suite_id
1166 JOIN source s ON b.source = s.id
1167 JOIN files_archive_map af ON b.file = af.file_id
1168 JOIN component c ON af.component_id = c.id
1169 WHERE b.architecture = :arch_id'''
1170 query = session.query('package', 'source', 'component', 'depends', 'provides'). \
1171 from_statement(statement).params(params)
1172 for package, source, component, depends, provides in query:
1173 sources[package] = source
1174 p2c[package] = component
1175 if depends is not None:
1176 deps[package] = depends
1177 # Maintain a counter for each virtual package. If a
1178 # Provides: exists, set the counter to 0 and count all
1179 # provides by a package not in the list for removal.
1180 # If the counter stays 0 at the end, we know that only
1181 # the to-be-removed packages provided this virtual
1183 if provides is not None:
1184 for virtual_pkg in provides.split(","):
1185 virtual_pkg = virtual_pkg.strip()
1186 if virtual_pkg == package: continue
1187 if not virtual_packages.has_key(virtual_pkg):
1188 virtual_packages[virtual_pkg] = 0
1189 if package not in removals:
1190 virtual_packages[virtual_pkg] += 1
1192 # If a virtual package is only provided by the to-be-removed
1193 # packages, treat the virtual package as to-be-removed too.
1194 removal_set.update(virtual_pkg for virtual_pkg in virtual_packages if not virtual_packages[virtual_pkg])
1196 # Check binary dependencies (Depends)
1197 for package in deps:
1198 if package in removals: continue
1200 parsed_dep = apt_pkg.parse_depends(deps[package])
1201 except ValueError as e:
1202 print "Error for package %s: %s" % (package, e)
1204 for dep in parsed_dep:
1205 # Check for partial breakage. If a package has a ORed
1206 # dependency, there is only a dependency problem if all
1207 # packages in the ORed depends will be removed.
1209 for dep_package, _, _ in dep:
1210 if dep_package in removals:
1212 if unsat == len(dep):
1213 component = p2c[package]
1214 source = sources[package]
1215 if component != "main":
1216 source = "%s/%s" % (source, component)
1217 all_broken[source][package].add(architecture)
1220 if all_broken and not quiet:
1222 print " - broken Depends:"
1224 print "# Broken Depends:"
1225 for source, bindict in sorted(all_broken.items()):
1227 for binary, arches in sorted(bindict.items()):
1228 if arches == all_arches or 'all' in arches:
1229 lines.append(binary)
1231 lines.append('%s [%s]' % (binary, ' '.join(sorted(arches))))
1233 print ' %s: %s' % (source, lines[0])
1235 print '%s: %s' % (source, lines[0])
1236 for line in lines[1:]:
1238 print ' ' + ' ' * (len(source) + 2) + line
1240 print ' ' * (len(source) + 2) + line
1244 # Check source dependencies (Build-Depends and Build-Depends-Indep)
1245 all_broken = defaultdict(set)
1246 metakey_bd = get_or_set_metadatakey("Build-Depends", session)
1247 metakey_bdi = get_or_set_metadatakey("Build-Depends-Indep", session)
1248 if include_arch_all:
1249 metakey_ids = (metakey_bd.key_id, metakey_bdi.key_id)
1251 metakey_ids = (metakey_bd.key_id,)
1254 'suite_id': dbsuite.suite_id,
1255 'metakey_ids': metakey_ids,
1258 SELECT s.source, string_agg(sm.value, ', ') as build_dep
1260 JOIN source_metadata sm ON s.id = sm.src_id
1262 (SELECT src FROM newest_src_association
1263 WHERE suite = :suite_id)
1264 AND sm.key_id in :metakey_ids
1265 GROUP BY s.id, s.source'''
1266 query = session.query('source', 'build_dep').from_statement(statement). \
1268 for source, build_dep in query:
1269 if source in removals: continue
1271 if build_dep is not None:
1272 # Remove [arch] information since we want to see breakage on all arches
1273 build_dep = re_build_dep_arch.sub("", build_dep)
1275 parsed_dep = apt_pkg.parse_src_depends(build_dep)
1276 except ValueError as e:
1277 print "Error for source %s: %s" % (source, e)
1278 for dep in parsed_dep:
1280 for dep_package, _, _ in dep:
1281 if dep_package in removals:
1283 if unsat == len(dep):
1284 component, = session.query(Component.component_name) \
1285 .join(Component.overrides) \
1286 .filter(Override.suite == overridesuite) \
1287 .filter(Override.package == re.sub('/(contrib|non-free)$', '', source)) \
1288 .join(Override.overridetype).filter(OverrideType.overridetype == 'dsc') \
1291 if component != "main":
1292 key = "%s/%s" % (source, component)
1293 all_broken[key].add(pp_deps(dep))
1296 if all_broken and not quiet:
1298 print " - broken Build-Depends:"
1300 print "# Broken Build-Depends:"
1301 for source, bdeps in sorted(all_broken.items()):
1302 bdeps = sorted(bdeps)
1304 print ' %s: %s' % (source, bdeps[0])
1306 print '%s: %s' % (source, bdeps[0])
1307 for bdep in bdeps[1:]:
1309 print ' ' + ' ' * (len(source) + 2) + bdep
1311 print ' ' * (len(source) + 2) + bdep
1317 ################################################################################
def parse_built_using(control):
    """source packages referenced via Built-Using

    @type control: dict-like
    @param control: control file to take Built-Using field from

    @rtype: list of (str, str)
    @return: list of (source_name, source_version) pairs

    @raise AssertionError: if the field contains alternatives ('|') or a
                           relation other than a strict '='
    """
    built_using = control.get('Built-Using', None)
    if built_using is None:
        # No Built-Using field: no extra sources are referenced.
        return []

    bu = []
    for dep in apt_pkg.parse_depends(built_using):
        # Each Built-Using entry must pin exactly one source at an exact
        # version: alternatives and non-'=' relations are not allowed.
        # Raise explicitly rather than via the assert statement so the
        # validation is not stripped when running under 'python -O'.
        if len(dep) != 1:
            raise AssertionError('Alternatives are not allowed in Built-Using field')
        source_name, source_version, comp = dep[0]
        if comp != '=':
            raise AssertionError('Built-Using must contain strict dependencies')
        bu.append((source_name, source_version))

    return bu
1341 ################################################################################
def is_in_debug_section(control):
    """binary package is a debug package

    @type control: dict-like
    @param control: control file of binary package

    @rtype: bool
    @return: True if the binary package is a debug package
    """
    # Strip a leading component from the Section if one is present,
    # e.g. "main/debug" -> "debug"; a bare "debug" is kept as-is.
    head, sep, tail = control['Section'].partition('/')
    section_name = tail if sep else head
    # A debug package lives in the "debug" section and was auto-built
    # as split-out debug symbols.
    return (section_name == "debug"
            and control.get("Auto-Built-Package") == "debug-symbols")