2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
43 import email as modemail
48 import daklib.config as config
49 import daklib.daksubprocess
50 from dbconn import DBConn, get_architecture, get_component, get_suite, \
51 get_override_type, Keyring, session_wrapper, \
52 get_active_keyring_paths, \
53 get_suite_architectures, get_or_set_metadatakey, DBSource, \
54 Component, Override, OverrideType
55 from sqlalchemy import desc
56 from dak_exceptions import *
57 from gpg import SignedFile
58 from textutils import fix_maintainer
59 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
60 re_multi_line_field, re_srchasver, re_taint_free, \
61 re_re_mark, re_whitespace_comment, re_issource, \
62 re_build_dep_arch, re_parse_maintainer
64 from formats import parse_format, validate_changes_format
65 from srcformats import get_format_from_string
66 from collections import defaultdict
68 ################################################################################
# Path of the site-wide dak configuration file; per-host overrides are
# resolved at runtime by which_conf_file().
default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties

alias_cache = None #: Cache for email alias checks
key_uid_email_cache = {} #: Cache for email addresses from gpg key uids

# (hashname, function, earliest_changes_version)
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
                ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
# Monkeypatch commands.getstatusoutput as it may not return the correct exit
# code in lenny's Python. This also affects commands.getoutput and
def dak_getstatusoutput(cmd):
    # Run `cmd` through the shell, folding stderr into stdout so callers get
    # a single combined output string (same contract as the stdlib function).
    pipe = daklib.daksubprocess.Popen(cmd, shell=True, universal_newlines=True,
                                      stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

    output = pipe.stdout.read()

    # NOTE(review): the pipe.wait()/exit-status lines are elided in this
    # listing.  Strip a single trailing newline, as the commands module does.
    if output[-1:] == '\n':

# Replace the stdlib implementation with the fixed one above.
commands.getstatusoutput = dak_getstatusoutput
100 ################################################################################
103 """ Escape html chars """
104 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
106 ################################################################################
def open_file(filename, mode='r'):
    """
    Open C{filename}, return fileobject.

    @type filename: string
    @param filename: path/filename to open

    @type mode: string
    @param mode: open mode

    @rtype: fileobject
    @return: open fileobject

    @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
    """
    f = open(filename, mode)
    # NOTE(review): the enclosing try/except IOError and the `return f` are
    # elided in this listing; as shown, CantOpenError would always be raised.
    raise CantOpenError(filename)
130 ################################################################################
def our_raw_input(prompt=""):
    """Write *prompt* to stdout and read a line from stdin; aborts on EOF."""
    sys.stdout.write(prompt)
    # NOTE(review): the raw_input() call and the try/except EOFError around
    # it are elided; this message is emitted when the user closes stdin (^D).
    sys.stderr.write("\nUser interrupt (^D).\n")
148 ################################################################################
def extract_component_from_section(section, session=None):
    """Split a Section value like "contrib/net" into (section, component)."""
    # An explicit component appears as a "component/" prefix of the section.
    if section.find('/') != -1:
        component = section.split('/')[0]

    # Expand default component
    # NOTE(review): the fallback assignment of `component` for sections
    # without a '/' is elided in this listing.
    return (section, component)
162 ################################################################################
def parse_deb822(armored_contents, signing_rules=0, keyrings=None, session=None):
    """Parse a (possibly PGP-signed) deb822 paragraph into a dict keyed by
    lower-cased field name.

    NOTE(review): a number of lines of this function are elided in this
    listing; parenthesised comments below mark the visible gaps.
    """
    require_signature = True
    # (guard elided -- presumably drops the requirement when signing_rules
    # says no signature is needed; TODO confirm)
        require_signature = False

    signed_file = SignedFile(armored_contents, keyrings=keyrings, require_signature=require_signature)
    contents = signed_file.contents

    # Split the lines in the input, keeping the linebreaks.
    lines = contents.splitlines(True)
    # (emptiness check elided)
        raise ParseChangesError("[Empty changes file]")

    # Reindex by line number so we can easily verify the format of
    # the file.  (indexing loop header elided)
        indexed_lines[index] = line[:-1]

    num_of_lines = len(indexed_lines.keys())
    while index < num_of_lines:
        line = indexed_lines[index]
        # In strict (.dsc) mode a blank line is only valid at the very end
        # of the signed data section.
        if line == "" and signing_rules == 1:
            if index != num_of_lines:
                raise InvalidDscError(index)
        # Single-line field: "Field: value"
        slf = re_single_line_field.match(line)
            field = slf.groups()[0].lower()
            changes[field] = slf.groups()[1]
                changes[field] += '\n'
        # Continuation line of a multi-line field.
        mlf = re_multi_line_field.match(line)
                raise ParseChangesError("'%s'\n [Multi-line field continuing on from nothing?]" % (line))
            if first == 1 and changes[field] != "":
                changes[field] += '\n'
            changes[field] += mlf.groups()[0] + '\n'

    # Keep the raw input around for callers that need the original text.
    changes["filecontents"] = armored_contents

    if changes.has_key("source"):
        # Strip the source version in brackets from the source field,
        # put it in the "source-version" field instead.
        srcver = re_srchasver.search(changes["source"])
            changes["source"] = srcver.group(1)
            changes["source-version"] = srcver.group(2)

    # (error-propagation guard and `return changes` are elided; `error` is
    # presumably set by the elided signature/format checks -- TODO confirm)
        raise ParseChangesError(error)
235 ################################################################################
def parse_changes(filename, signing_rules=0, dsc_file=0, keyrings=None):
    """
    Parses a changes file and returns a dictionary where each field is a
    key. The mandatory first argument is the filename of the .changes
    file.

    signing_rules is an optional argument:

      - If signing_rules == -1, no signature is required.
      - If signing_rules == 0 (the default), a signature is required.
      - If signing_rules == 1, it turns on the same strict format checking
        as used for .dsc files.

    The rules for (signing_rules == 1)-mode are:

      - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
        followed by any PGP header data and must end with a blank line.

      - The data section must end with a blank line and must be followed by
        "-----BEGIN PGP SIGNATURE-----".
    """
    with open_file(filename) as changes_in:
        content = changes_in.read()
    # (try/except elided -- the utf-8 validation below is presumably wrapped
    # so that a UnicodeError becomes ChangesUnicodeError; TODO confirm)
        unicode(content, 'utf-8')
        raise ChangesUnicodeError("Changes file not proper utf-8")
    changes = parse_deb822(content, signing_rules, keyrings=keyrings)

    # Finally ensure that everything needed for .changes is there
    must_keywords = ('Format', 'Date', 'Source', 'Binary', 'Architecture', 'Version',
                     'Distribution', 'Maintainer', 'Description', 'Changes', 'Files')

    # Collect any missing mandatory fields (missingfields init is elided).
    for keyword in must_keywords:
        if not changes.has_key(keyword.lower()):
            missingfields.append(keyword)

    if len(missingfields):
        raise ParseChangesError("Missing mandatory field(s) in changes file (policy 5.5): %s" % (missingfields))
283 ################################################################################
def hash_key(hashname):
    """Return the dict key under which a *hashname* digest is stored,
    e.g. "md5" -> "md5sum"."""
    return "{0}sum".format(hashname)
288 ################################################################################
def check_dsc_files(dsc_filename, dsc, dsc_files):
    """
    Verify that the files listed in the Files field of the .dsc are
    those expected given the announced Format.

    @type dsc_filename: string
    @param dsc_filename: path of .dsc file

    @type dsc: dict
    @param dsc: the content of the .dsc parsed by C{parse_changes()}

    @type dsc_files: dict
    @param dsc_files: the file list returned by C{build_file_list()}

    @rtype: list
    @return: all errors detected
    """
    # Ensure .dsc lists proper set of source files according to the format
    # announced
    has = defaultdict(lambda: 0)

    # Suffix-regex -> keys of `has` to increment on a match.
    # (the assignment head of this lookup table is elided in this listing)
        (r'orig.tar.gz', ('orig_tar_gz', 'orig_tar')),
        (r'diff.gz', ('debian_diff',)),
        (r'tar.gz', ('native_tar_gz', 'native_tar')),
        (r'debian\.tar\.(gz|bz2|xz)', ('debian_tar',)),
        (r'orig\.tar\.(gz|bz2|xz)', ('orig_tar',)),
        (r'orig\.tar\.(gz|bz2|xz)\.asc', ('orig_tar_sig',)),
        (r'tar\.(gz|bz2|xz)', ('native_tar',)),
        (r'orig-.+\.tar\.(gz|bz2|xz)', ('more_orig_tar',)),
        (r'orig-.+\.tar\.(gz|bz2|xz)\.asc', ('more_orig_tar_sig',)),

    # (per-file loop header over dsc_files is elided)
        m = re_issource.match(f)
        # Reject anything that does not even look like a source artifact.
            rejmsg.append("%s: %s in Files field not recognised as source."
        # Populate 'has' dictionary by resolving keys in lookup table
        for regex, keys in ftype_lookup:
            if re.match(regex, m.group(3)):
        # File does not match anything in lookup table; reject
            reject("%s: unexpected source file '%s'" % (dsc_filename, f))

    # Check for multiple files
    for file_type in ('orig_tar', 'orig_tar_sig', 'native_tar', 'debian_tar', 'debian_diff'):
        if has[file_type] > 1:
            rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))

    # Source format specific tests
    # (try: is elided)
        format = get_format_from_string(dsc['format'])
        # (the rejmsg.extend(...) wrapping the generator below is elided)
            '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)
    except UnknownFormatError:
        # Not an error here for now
363 ################################################################################
# Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
    """Parse a Files/Checksums-* field of a parsed .changes/.dsc dict into a
    per-filename dict of size/section/priority/component plus the hash."""
    # Make sure we have a Files: field to parse...
    if not changes.has_key(field):
        raise NoFilesFieldError

    # Validate .changes Format: field
    validate_changes_format(parse_format(changes['format']), field)

    # Section/priority columns are only present in a .changes "files" field.
    includes_section = (not is_a_dsc) and field == "files"

    # Parse each entry/line:
    for i in changes[field].split('\n'):
        # (blank-line skip and the split into `s` are elided in this listing)
        section = priority = ""
        # Five columns with section/priority, three columns without.
            (md5, size, section, priority, name) = s
            (md5, size, name) = s
        # (malformed-line branch)
            raise ParseChangesError(i)

        (section, component) = extract_component_from_section(section)

        files[name] = dict(size=size, section=section,
                           priority=priority, component=component)
        files[name][hashname] = md5
407 ################################################################################
def send_mail (message, filename="", whitelists=None):
    """sendmail wrapper, takes _either_ a message string or a file as arguments

    @type whitelists: list of (str or None)
    @param whitelists: path to whitelists. C{None} or an empty list whitelists
                       everything, otherwise an address is whitelisted if it is
                       included in any of the lists.
                       In addition a global whitelist can be specified in
                       Dinstall::MailWhiteList.

    NOTE(review): many lines of this function are elided in this listing;
    parenthesised comments mark the visible gaps.
    """
    # If configured, archive a copy of every outgoing mail under Dir::Mail.
    maildir = Cnf.get('Dir::Mail')
    # (guard on maildir elided)
        path = os.path.join(maildir, datetime.datetime.now().isoformat())
        path = find_next_free(path)
        with open(path, 'w') as fh:
            # (write of the mail body elided)

    # Check whether we're supposed to be sending mail
    if Cnf.has_key("Dinstall::Options::No-Mail") and Cnf["Dinstall::Options::No-Mail"]:
        # (early return elided)

    # If we've been passed a string dump it into a temporary file
    # (guard on `message` elided)
        (fd, filename) = tempfile.mkstemp()
        os.write (fd, message)
        # (os.close(fd) elided)

    # None anywhere in the caller-supplied lists disables filtering, except
    # for the optional global Dinstall::MailWhiteList.
    if whitelists is None or None in whitelists:
        # (reset of whitelists elided)
    if Cnf.get('Dinstall::MailWhiteList', ''):
        whitelists.append(Cnf['Dinstall::MailWhiteList'])
    if len(whitelists) != 0:
        with open_file(filename) as message_in:
            message_raw = modemail.message_from_file(message_in)

        # Compile one regex per whitelist entry; marked lines are treated as
        # regexes, everything else is matched literally.
        for path in whitelists:
            with open_file(path, 'r') as whitelist_in:
                for line in whitelist_in:
                    if not re_whitespace_comment.match(line):
                        if re_re_mark.match(line):
                            whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
                        # (else:)
                            whitelist.append(re.compile(re.escape(line.strip())))

        # Filter each recipient header down to whitelisted addresses.
        fields = ["To", "Bcc", "Cc"]
        # (loop over fields elided)
            value = message_raw.get(field, None)
            # (None check elided)
                for item in value.split(","):
                    (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
                    # (whitelist matching loop elided)
                    if not mail_whitelisted:
                        print "Skipping {0} since it's not whitelisted".format(item)
                # (empty match list:)
                    # Doesn't have any mail in whitelist so remove the header
                    del message_raw[field]
                # (else:)
                    message_raw.replace_header(field, ', '.join(match))

        # Change message fields in order if we don't have a To header
        if not message_raw.has_key("To"):
            # (loop over candidate fields elided)
            if message_raw.has_key(field):
                message_raw[fields[-1]] = message_raw[field]
                del message_raw[field]
            # (branch for "no recipients left at all":)
                # Clean up any temporary files
                # and return, as we removed all recipients.
                os.unlink (filename);

        # Rewrite the (possibly filtered) message back to the file.
        fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0o700);
        os.write (fd, message_raw.as_string(True));

    # Invoke sendmail; raise if it reports failure.
    (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
    # (non-zero result check elided)
        raise SendmailFailedError(output)

    # Clean up any temporary files
508 ################################################################################
def poolify(source, component=None):
    """Return the pool subdirectory for *source*, e.g. "libfoo" ->
    "libf/libfoo/" and "bar" -> "b/bar/"."""
    # "lib*" packages are pooled under a four-character prefix, everything
    # else under the first letter.
    prefix_len = 4 if source.startswith("lib") else 1
    return "%s/%s/" % (source[:prefix_len], source)
516 ################################################################################
def move (src, dest, overwrite = 0, perms = 0o664):
    """Copy *src* to *dest* with metadata and set *perms*, creating the
    destination directory if necessary.  NOTE(review): several lines (the
    dest_dir=dest branch, umask restore, overwrite branch and the final
    unlink of src) are elided in this listing."""
    # Work out the directory that must exist for the destination.
    if os.path.exists(dest) and os.path.isdir(dest):
        # (dest_dir = dest elided)
    # (else:)
        dest_dir = os.path.dirname(dest)
    if not os.path.lexists(dest_dir):
        # Create missing directories group-writable and setgid.
        umask = os.umask(00000)
        os.makedirs(dest_dir, 0o2775)
        # (restore of the previous umask elided)
    #print "Moving %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.lexists(dest):
        # (check of the `overwrite` flag elided)
            fubar("Can't move %s to %s - file already exists." % (src, dest))
        # (else: overwriting allowed, but the target must be writable)
        if not os.access(dest, os.W_OK):
            fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
def copy (src, dest, overwrite = 0, perms = 0o664):
    """Copy *src* to *dest* with metadata and set *perms*, creating the
    destination directory if necessary.  Unlike move() this raises
    exceptions instead of calling fubar().  NOTE(review): several lines
    are elided in this listing (see parenthesised comments)."""
    if os.path.exists(dest) and os.path.isdir(dest):
        # (dest_dir = dest elided)
    # (else:)
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        # Create missing directories group-writable and setgid.
        umask = os.umask(00000)
        os.makedirs(dest_dir, 0o2775)
        # (restore of the previous umask elided)
    #print "Copying %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.lexists(dest):
        # (check of the `overwrite` flag elided)
            raise FileExistsError
        # (else: overwriting allowed, but the target must be writable)
        if not os.access(dest, os.W_OK):
            raise CantOverwriteError
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
563 ################################################################################
def which_conf_file ():
    """Determine which dak configuration file to use.

    Order of precedence:
      1. the DAK_CONFIG environment variable,
      2. a per-user config (merged into Cnf) when allowed for this host,
      3. a per-host Config::<fqdn>::DakConfig entry,
      4. the module default, default_config.

    @rtype: string
    @return: path of the configuration file
    """
    if os.getenv('DAK_CONFIG'):
        return os.getenv('DAK_CONFIG')

    res = socket.getfqdn()
    # In case we allow local config files per user, try if one exists
    if Cnf.find_b("Config::" + res + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # Fixed: os.path.join discards all preceding components when a later
        # one is absolute, so the old "/etc/dak.conf" argument silently
        # ignored $HOME and looked at /etc/dak.conf instead.
        confpath = os.path.join(homedir, "etc/dak.conf")
        if os.path.exists(confpath):
            apt_pkg.read_config_file_isc(Cnf, confpath)

    # We are still in here, so there is no local config file or we do
    # not allow local files. Do the normal stuff.
    if Cnf.get("Config::" + res + "::DakConfig"):
        return Cnf["Config::" + res + "::DakConfig"]

    return default_config
584 ################################################################################
def TemplateSubst(subst_map, filename):
    """Perform a substitution on a template file.

    @param subst_map: dict mapping placeholder -> replacement value
    @param filename: path of the template file
    @rtype: string
    @return: the template text with every placeholder replaced
    """
    with open_file(filename) as templatefile:
        template = templatefile.read()
    # Replace each placeholder with its (stringified) value.
    for k, v in subst_map.iteritems():
        template = template.replace(k, str(v))
    # Fixed: the result was never returned (function yielded None as listed).
    return template
594 ################################################################################
def fubar(msg, exit_code=1):
    """Print an error message (prefixed "E: ") to stderr and terminate.

    @param msg: message to print
    @param exit_code: process exit status (default 1)
    """
    sys.stderr.write("E: %s\n" % (msg))
    # Fixed: the terminating sys.exit() was missing as listed, so callers
    # would have continued running after a fatal error.
    sys.exit(exit_code)


def warn(msg):
    """Print a warning message (prefixed "W: ") to stderr."""
    sys.stderr.write("W: %s\n" % (msg))
603 ################################################################################
605 # Returns the user name with a laughable attempt at rfc822 conformancy
606 # (read: removing stray periods).
608 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
611 return pwd.getpwuid(os.getuid())[0]
613 ################################################################################
623 return ("%d%s" % (c, t))
625 ################################################################################
def find_next_free (dest, too_many=100):
    """Return *dest* if nothing exists there yet, otherwise the first free
    name of the form "dest.N" (N = 0, 1, ...).

    @param dest: desired path
    @param too_many: maximum number of suffixes to try
    @raise NoFreeFilenameError: if no free name is found within *too_many* tries
    @rtype: string
    @return: a path that does not currently exist
    """
    # Fixed: `extra`/`orig_dest` were never initialised and `extra` never
    # incremented as listed (NameError / endless loop); `dest` was also
    # never returned.
    extra = 0
    orig_dest = dest
    while os.path.lexists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
        extra += 1
    if extra >= too_many:
        raise NoFreeFilenameError
    return dest
637 ################################################################################
def result_join (original, sep = '\t'):
    """Join the items of *original* with *sep*, rendering None items as "".

    @param original: sequence of strings (items may be None)
    @param sep: separator string (default TAB)
    @rtype: string
    @return: the joined string
    """
    # Fixed: the accumulator list was never initialised and the non-None
    # branch was missing as listed; also replaced the index loop and the
    # `== None` comparison with the idiomatic generator form.
    return sep.join("" if item is None else item for item in original)
648 ################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    """Prefix every line of *str* with *prefix*; blank lines are dropped
    unless *include_blank_lines* is true.

    @param str: input text (the name shadows the builtin; kept for
                interface compatibility)
    @param prefix: string prepended to each emitted line
    @param include_blank_lines: if true, keep blank lines (prefixed)
    @rtype: string
    @return: prefixed text without a trailing newline
    """
    # Fixed: the `out` accumulator was never initialised and the result
    # never returned as listed.
    out = ""
    for line in str.split('\n'):
        line = line.strip()
        if line or include_blank_lines:
            out += "%s%s\n" % (prefix, line)
    # Strip trailing new line
    if out:
        out = out[:-1]
    return out
661 ################################################################################
def join_with_commas_and(list):
    """Render a sequence as English prose: "" -> "nothing", ["a"] -> "a",
    ["a", "b", "c"] -> "a, b and c"."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    head, last = list[:-1], list[-1]
    return "%s and %s" % (", ".join(head), last)
668 ################################################################################
673 (pkg, version, constraint) = atom
675 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
678 pp_deps.append(pp_dep)
679 return " |".join(pp_deps)
681 ################################################################################
686 ################################################################################
def parse_args(Options):
    """ Handle -a, -c and -s arguments; returns them as SQL constraints """
    # XXX: This should go away and everything which calls it be converted
    # to use SQLA properly. For now, we'll just fix it not to use
    # the old Pg interface though
    session = DBConn().session()

    # Process suite: map each name to a suite id and build an
    # "AND su.id IN (...)" fragment.  (surrounding if/else lines elided)
    for suitename in split_args(Options["Suite"]):
        suite = get_suite(suitename, session=session)
        if not suite or suite.suite_id is None:
            warn("suite '%s' not recognised." % (suite and suite.suite_name or suitename))
        # (else:)
            suite_ids_list.append(suite.suite_id)
    # (if suite_ids_list:)
        con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
    # (else:)
        fubar("No valid suite given.")

    # Process component, same pattern as suites.
    if Options["Component"]:
        component_ids_list = []
        for componentname in split_args(Options["Component"]):
            component = get_component(componentname, session=session)
            if component is None:
                warn("component '%s' not recognised." % (componentname))
            # (else:)
                component_ids_list.append(component.component_id)
        if component_ids_list:
            con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
        # (else:)
            fubar("No valid component given.")

    # Process architecture
    con_architectures = ""
    # (check_source / arch_ids_list initialisation elided)
    if Options["Architecture"]:
        for archname in split_args(Options["Architecture"]):
            if archname == "source":
                # ("source" presumably toggles check_source instead of
                # adding an architecture id -- TODO confirm)
            # (else:)
                arch = get_architecture(archname, session=session)
                # (None check elided)
                warn("architecture '%s' not recognised." % (archname))
                # (else:)
                arch_ids_list.append(arch.arch_id)
        # (if arch_ids_list:)
            con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
        # (else, unless only "source" was requested:)
            fubar("No valid architecture given.")

    return (con_suites, con_architectures, con_components, check_source)
750 ################################################################################
def arch_compare_sw (a, b):
    """
    Function for use in sorting lists of architectures.

    Sorts normally except that 'source' dominates all others.
    """
    # NOTE(review): only the both-"source" case is visible here; the
    # remaining comparison branches are elided in this listing.
    if a == "source" and b == "source":
768 ################################################################################
def split_args (s, dwim=True):
    """
    Split command line arguments which can be separated by either commas
    or whitespace. If dwim is set, it will complain about string ending
    in comma since this usually means someone did 'dak ls -a i386, m68k
    foo' or something and the inevitable confusion resulting from 'm68k'
    being treated as an argument is undesirable.
    """
    # Fixed: both return statements were missing as listed, so the function
    # always returned None.
    if s.find(",") == -1:
        # No commas at all: plain whitespace split.
        return s.split()
    else:
        if s[-1:] == "," and dwim:
            fubar("split_args: found trailing comma, spurious space maybe?")
        return s.split(",")
786 ################################################################################
def gpg_keyring_args(keyrings=None):
    """Build the --keyring arguments for invoking gpg.

    @param keyrings: list of keyring paths; when empty or None, the active
                     keyring paths from the database are used.
    @rtype: string
    @return: space-separated "--keyring <path>" options
    """
    # Fixed: the guard was missing as listed, so a caller-supplied keyring
    # list was always overwritten by the database lookup.
    if not keyrings:
        keyrings = get_active_keyring_paths()

    return " ".join(["--keyring %s" % x for x in keyrings])
794 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """retreive email addresses from gpg key uids for a given fingerprint"""
    # Serve repeat lookups from the module-level cache.
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses != None:
        # (return of the cached list elided)
    # (addresses init and try: elided)
        with open(os.devnull, "wb") as devnull:
            output = daklib.daksubprocess.check_output(
                ["gpg", "--no-default-keyring"] + gpg_keyring_args().split() +
                ["--with-colons", "--list-keys", fingerprint], stderr=devnull)
    except subprocess.CalledProcessError:
        # (fallback return on gpg failure elided)
    # Walk gpg's colon-delimited records; only uid/pub rows carry user ids.
    for l in output.split('\n'):
        # (colon split into `parts` elided)
        if parts[0] not in ("uid", "pub"):
            # (continue and the uid extraction elided)
        # (try: elided)
            # Do not use unicode_escape, because it is locale-specific
            uid = codecs.decode(uid, "string_escape").decode("utf-8")
        except UnicodeDecodeError:
            uid = uid.decode("latin1") # does not fail
        m = re_parse_maintainer.match(uid)
        # (match check and address extraction elided)
            address = address.encode("utf8") # dak still uses bytes
            if address.endswith('@debian.org'):
                # prefer @debian.org addresses
                # TODO: maybe not hardcode the domain
                addresses.insert(0, address)
            # (else:)
                addresses.append(address)
    key_uid_email_cache[fingerprint] = addresses
    # (return addresses elided)
837 ################################################################################
def get_logins_from_ldap(fingerprint='*'):
    """retrieve login from LDAP linked to a given fingerprint"""
    # (ldap import / setup lines elided in this listing)
    LDAPDn = Cnf['Import-LDAP-Fingerprints::LDAPDn']
    LDAPServer = Cnf['Import-LDAP-Fingerprints::LDAPServer']
    l = ldap.open(LDAPServer)
    # Anonymous bind; the lookup is read-only.
    l.simple_bind_s('','')
    Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
                       '(keyfingerprint=%s)' % fingerprint,
                       ['uid', 'keyfingerprint'])
    # Build {fingerprint: login} (dict init and loop header elided).
        login[elem[1]['keyFingerPrint'][0]] = elem[1]['uid'][0]
    # (return login elided)
854 ################################################################################
def get_users_from_ldap():
    """retrieve login and user names from LDAP"""
    # (ldap import / setup lines elided in this listing)
    LDAPDn = Cnf['Import-LDAP-Fingerprints::LDAPDn']
    LDAPServer = Cnf['Import-LDAP-Fingerprints::LDAPServer']
    l = ldap.open(LDAPServer)
    l.simple_bind_s('','')
    Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
                       '(uid=*)', ['uid', 'cn', 'mn', 'sn'])
    # Assemble "cn mn sn" full names, skipping placeholder "-" parts
    # (dict init and outer loop header elided).
        for k in ('cn', 'mn', 'sn'):
            # (presence check for k elided)
            if elem[k][0] != '-':
                name.append(elem[k][0])
        users[' '.join(name)] = elem['uid'][0]
    # (return users elided)
878 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.

    @param src: absolute target path of the link
    @param dest: absolute path of the link itself
    @param root: archive root prefix stripped from both paths
    @rtype: string
    @return: relative link target ("../" per directory level of dest)
    """
    src = src.replace(root, '', 1)
    dest = dest.replace(root, '', 1)
    dest = os.path.dirname(dest)
    new_src = '../' * len(dest.split('/'))
    # Fixed: the result was never returned as listed.
    return new_src + src
891 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix="", mode=None, group=None):
    """
    Return a secure and unique filename by pre-creating it.

    @type directory: string
    @param directory: If non-null it will be the directory the file is pre-created in.

    @type prefix: string
    @param prefix: The filename will be prefixed with this string

    @type suffix: string
    @param suffix: The filename will end with this string

    @type mode: string
    @param mode: If set the file will get chmodded to those permissions

    @type group: string
    @param group: If set the file will get chgrped to the specified group.

    @rtype: list
    @return: Returns a pair (fd, name)
    """
    (tfd, tfname) = tempfile.mkstemp(suffix, prefix, directory)
    # (the guards on mode/group and the final return are elided in this
    # listing)
        os.chmod(tfname, mode)
        gid = grp.getgrnam(group).gr_gid
        os.chown(tfname, -1, gid)
924 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix="", mode=None, group=None):
    """
    Return a secure and unique directory by pre-creating it.

    @type parent: string
    @param parent: If non-null it will be the directory the directory is pre-created in.

    @type prefix: string
    @param prefix: The filename will be prefixed with this string

    @type suffix: string
    @param suffix: The filename will end with this string

    @type mode: string
    @param mode: If set the file will get chmodded to those permissions

    @type group: string
    @param group: If set the file will get chgrped to the specified group.

    @rtype: list
    @return: Returns a pair (fd, name)
    """
    tfname = tempfile.mkdtemp(suffix, prefix, parent)
    # (the guards on mode/group and the final return are elided in this
    # listing)
        os.chmod(tfname, mode)
        gid = grp.getgrnam(group).gr_gid
        os.chown(tfname, -1, gid)
958 ################################################################################
def is_email_alias(email):
    """ checks if the user part of the email is listed in the alias file """
    # Lazily populate the module-level cache on first use.  NOTE(review):
    # the `global alias_cache` statement and the cache initialisation are
    # elided in this listing.
    if alias_cache == None:
        aliasfn = which_alias_file()
        # (alias_cache = set() and the guard on aliasfn are elided)
            for l in open(aliasfn):
                # Alias file format is "name: target...", keyed by name.
                alias_cache.add(l.split(':')[0])
    uid = email.split('@')[0]
    return uid in alias_cache
972 ################################################################################
def get_changes_files(from_dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place)

    Returns a list of filenames
    """
    # (try: elided)
        # Much of the rest of p-u/p-a depends on being in the right place
        # (the os.chdir(from_dir) call is elided in this listing)
        changes_files = [x for x in os.listdir(from_dir) if x.endswith('.changes')]
    # (except OSError as e: elided)
        fubar("Failed to read list from directory %s (%s)" % (from_dir, e))

    # (return changes_files elided)
991 ################################################################################
# Module-wide configuration object shared by the helpers in this file.
Cnf = config.Config().Cnf
995 ################################################################################
def parse_wnpp_bug_file(file = "/srv/ftp-master.debian.org/scripts/masterfiles/wnpp_rm"):
    """
    Parses the wnpp bug list available at https://qa.debian.org/data/bts/wnpp_rm
    Well, actually it parsed a local copy, but let's document the source
    anyway.

    returns a dict associating source package name with a list of open wnpp
    bugs (Yes, there might be more than one)
    """
    # (try/open of `file` elided)
        lines = f.readlines()
    except IOError as e:
        print "Warning: Couldn't open %s; don't know about WNPP bugs, so won't close any." % file
        # (fallback return elided)
    # Each line is "srcpkg: bug|bug|..." (dict init and loop header elided).
        splited_line = line.split(": ", 1)
        if len(splited_line) > 1:
            wnpp[splited_line[0]] = splited_line[1].split("|")

    # Reduce each entry to just the bug numbers.
    for source in wnpp.keys():
        # (per-source bug list accumulator elided)
        for wnpp_bug in wnpp[source]:
            bug_no = re.search("(\d)+", wnpp_bug).group()
            # (append of bug_no and return of wnpp elided)
1030 ################################################################################
def get_packages_from_ftp(root, suite, component, architecture):
    """
    Returns an object containing apt_pkg-parseable data collected by
    aggregating Packages.gz files gathered for each architecture.

    @type root: string
    @param root: path to ftp archive root directory

    @type suite: string
    @param suite: suite to extract files from

    @type component: string
    @param component: component to extract files from

    @type architecture: string
    @param architecture: architecture to extract files from

    @rtype: TagFile
    @return: apt_pkg class containing package data
    """
    filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (root, suite, component, architecture)
    (fd, temp_file) = temp_filename()
    (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_file))
    # (non-zero result check elided)
        fubar("Gunzip invocation failed!\n%s\n" % (output), result)
    # Also merge in debian-installer packages, when present.
    filename = "%s/dists/%s/%s/debian-installer/binary-%s/Packages.gz" % (root, suite, component, architecture)
    if os.path.exists(filename):
        (result, output) = commands.getstatusoutput("gunzip -c %s >> %s" % (filename, temp_file))
        # (non-zero result check elided)
            fubar("Gunzip invocation failed!\n%s\n" % (output), result)
    packages = open_file(temp_file)
    Packages = apt_pkg.TagFile(packages)
    os.unlink(temp_file)
    # (return Packages elided)
1067 ################################################################################
def deb_extract_control(fh):
    """extract DEBIAN/control from a binary package"""
    deb = apt_inst.DebFile(fh)
    return deb.control.extractdata("control")
1073 ################################################################################
def mail_addresses_for_upload(maintainer, changed_by, fingerprint):
    """mail addresses to contact for an upload

    @type maintainer: str
    @param maintainer: Maintainer field of the .changes file

    @type changed_by: str
    @param changed_by: Changed-By field of the .changes file

    @type fingerprint: str
    @param fingerprint: fingerprint of the key used to sign the upload

    @rtype: list of str
    @return: list of RFC 2047-encoded mail addresses to contact regarding
             the upload
    """
    recipients = [maintainer]
    if changed_by != maintainer:
        recipients.append(changed_by)

    # If the signing key carries none of the addresses above, also notify
    # the signer via the first address attached to the key.
    key_addresses = gpg_get_key_addresses(fingerprint)
    if key_addresses:
        claimed = set(key_addresses)
        if fix_maintainer(changed_by)[3] not in claimed \
                and fix_maintainer(maintainer)[3] not in claimed:
            recipients.append(key_addresses[0])

    return [fix_maintainer(entry)[1] for entry in recipients]
1102 ################################################################################
def call_editor(text="", suffix=".txt"):
    """run editor and return the result as a string

    @type text: str
    @param text: initial text

    @type suffix: str
    @param suffix: extension for temporary file

    @rtype: str
    @return: string with the edited text
    """
    # Prefer $VISUAL, then $EDITOR, falling back to vi.
    editor = os.environ.get('VISUAL', os.environ.get('EDITOR', 'vi'))
    tmp = tempfile.NamedTemporaryFile(suffix=suffix, delete=False)
    # (the write of `text`, the close, and the try/finally that removes the
    # temporary file are elided in this listing)
    daklib.daksubprocess.check_call([editor, tmp.name])
    return open(tmp.name, 'r').read()
1126 ################################################################################
1128 def check_reverse_depends(removals, suite, arches=None, session=None, cruft=False, quiet=False, include_arch_all=True):
1129 dbsuite = get_suite(suite, session)
1130 overridesuite = dbsuite
1131 if dbsuite.overridesuite is not None:
1132 overridesuite = get_suite(dbsuite.overridesuite, session)
1135 all_broken = defaultdict(lambda: defaultdict(set))
1137 all_arches = set(arches)
1139 all_arches = set(x.arch_string for x in get_suite_architectures(suite))
1140 all_arches -= set(["source", "all"])
1141 removal_set = set(removals)
1142 metakey_d = get_or_set_metadatakey("Depends", session)
1143 metakey_p = get_or_set_metadatakey("Provides", session)
1145 'suite_id': dbsuite.suite_id,
1146 'metakey_d_id': metakey_d.key_id,
1147 'metakey_p_id': metakey_p.key_id,
1149 if include_arch_all:
1150 rdep_architectures = all_arches | set(['all'])
1152 rdep_architectures = all_arches
1153 for architecture in rdep_architectures:
1156 virtual_packages = {}
1157 params['arch_id'] = get_architecture(architecture, session).arch_id
1160 SELECT b.package, s.source, c.name as component,
1161 (SELECT bmd.value FROM binaries_metadata bmd WHERE bmd.bin_id = b.id AND bmd.key_id = :metakey_d_id) AS depends,
1162 (SELECT bmp.value FROM binaries_metadata bmp WHERE bmp.bin_id = b.id AND bmp.key_id = :metakey_p_id) AS provides
1164 JOIN bin_associations ba ON b.id = ba.bin AND ba.suite = :suite_id
1165 JOIN source s ON b.source = s.id
1166 JOIN files_archive_map af ON b.file = af.file_id
1167 JOIN component c ON af.component_id = c.id
1168 WHERE b.architecture = :arch_id'''
1169 query = session.query('package', 'source', 'component', 'depends', 'provides'). \
1170 from_statement(statement).params(params)
1171 for package, source, component, depends, provides in query:
1172 sources[package] = source
1173 p2c[package] = component
1174 if depends is not None:
1175 deps[package] = depends
1176 # Maintain a counter for each virtual package. If a
1177 # Provides: exists, set the counter to 0 and count all
1178 # provides by a package not in the list for removal.
1179 # If the counter stays 0 at the end, we know that only
1180 # the to-be-removed packages provided this virtual
1182 if provides is not None:
1183 for virtual_pkg in provides.split(","):
1184 virtual_pkg = virtual_pkg.strip()
1185 if virtual_pkg == package: continue
1186 if not virtual_packages.has_key(virtual_pkg):
1187 virtual_packages[virtual_pkg] = 0
1188 if package not in removals:
1189 virtual_packages[virtual_pkg] += 1
1191 # If a virtual package is only provided by the to-be-removed
1192 # packages, treat the virtual package as to-be-removed too.
1193 removal_set.update(virtual_pkg for virtual_pkg in virtual_packages if not virtual_packages[virtual_pkg])
1195 # Check binary dependencies (Depends)
1196 for package in deps:
1197 if package in removals: continue
1199 parsed_dep = apt_pkg.parse_depends(deps[package])
1200 except ValueError as e:
1201 print "Error for package %s: %s" % (package, e)
1203 for dep in parsed_dep:
1204 # Check for partial breakage. If a package has a ORed
1205 # dependency, there is only a dependency problem if all
1206 # packages in the ORed depends will be removed.
1208 for dep_package, _, _ in dep:
1209 if dep_package in removals:
1211 if unsat == len(dep):
1212 component = p2c[package]
1213 source = sources[package]
1214 if component != "main":
1215 source = "%s/%s" % (source, component)
1216 all_broken[source][package].add(architecture)
1219 if all_broken and not quiet:
1221 print " - broken Depends:"
1223 print "# Broken Depends:"
1224 for source, bindict in sorted(all_broken.items()):
1226 for binary, arches in sorted(bindict.items()):
1227 if arches == all_arches or 'all' in arches:
1228 lines.append(binary)
1230 lines.append('%s [%s]' % (binary, ' '.join(sorted(arches))))
1232 print ' %s: %s' % (source, lines[0])
1234 print '%s: %s' % (source, lines[0])
1235 for line in lines[1:]:
1237 print ' ' + ' ' * (len(source) + 2) + line
1239 print ' ' * (len(source) + 2) + line
1243 # Check source dependencies (Build-Depends and Build-Depends-Indep)
1244 all_broken = defaultdict(set)
1245 metakey_bd = get_or_set_metadatakey("Build-Depends", session)
1246 metakey_bdi = get_or_set_metadatakey("Build-Depends-Indep", session)
1247 if include_arch_all:
1248 metakey_ids = (metakey_bd.key_id, metakey_bdi.key_id)
1250 metakey_ids = (metakey_bd.key_id,)
1253 'suite_id': dbsuite.suite_id,
1254 'metakey_ids': metakey_ids,
1257 SELECT s.source, string_agg(sm.value, ', ') as build_dep
1259 JOIN source_metadata sm ON s.id = sm.src_id
1261 (SELECT src FROM newest_src_association
1262 WHERE suite = :suite_id)
1263 AND sm.key_id in :metakey_ids
1264 GROUP BY s.id, s.source'''
1265 query = session.query('source', 'build_dep').from_statement(statement). \
1267 for source, build_dep in query:
1268 if source in removals: continue
1270 if build_dep is not None:
1271 # Remove [arch] information since we want to see breakage on all arches
1272 build_dep = re_build_dep_arch.sub("", build_dep)
1274 parsed_dep = apt_pkg.parse_src_depends(build_dep)
1275 except ValueError as e:
1276 print "Error for source %s: %s" % (source, e)
1277 for dep in parsed_dep:
1279 for dep_package, _, _ in dep:
1280 if dep_package in removals:
1282 if unsat == len(dep):
1283 component, = session.query(Component.component_name) \
1284 .join(Component.overrides) \
1285 .filter(Override.suite == overridesuite) \
1286 .filter(Override.package == re.sub('/(contrib|non-free)$', '', source)) \
1287 .join(Override.overridetype).filter(OverrideType.overridetype == 'dsc') \
1290 if component != "main":
1291 key = "%s/%s" % (source, component)
1292 all_broken[key].add(pp_deps(dep))
1295 if all_broken and not quiet:
1297 print " - broken Build-Depends:"
1299 print "# Broken Build-Depends:"
1300 for source, bdeps in sorted(all_broken.items()):
1301 bdeps = sorted(bdeps)
1303 print ' %s: %s' % (source, bdeps[0])
1305 print '%s: %s' % (source, bdeps[0])
1306 for bdep in bdeps[1:]:
1308 print ' ' + ' ' * (len(source) + 2) + bdep
1310 print ' ' * (len(source) + 2) + bdep
1316 ################################################################################
def parse_built_using(control):
    """source packages referenced via Built-Using

    @type control: dict-like
    @param control: control file to take Built-Using field from

    @rtype: list of (str, str)
    @return: list of (source_name, source_version) pairs

    @raise ValueError: the Built-Using field contains an alternative
                       (ORed) dependency or a non-strict (not C{=})
                       version relation
    """
    built_using = control.get('Built-Using', None)
    if built_using is None:
        return []

    bu = []
    for dep in apt_pkg.parse_depends(built_using):
        # Validate with explicit raises instead of assert: asserts are
        # stripped under `python -O`, which would silently accept a
        # malformed Built-Using field from an upload.
        if len(dep) != 1:
            raise ValueError('Alternatives are not allowed in Built-Using field')
        source_name, source_version, comp = dep[0]
        if comp != '=':
            raise ValueError('Built-Using must contain strict dependencies')
        bu.append((source_name, source_version))

    return bu
1340 ################################################################################
def is_in_debug_section(control):
    """binary package is a debug package

    @type control: dict-like
    @param control: control file of binary package

    @rtype: Boolean
    @return: True if the binary package is a debug package
    """
    # Section may look like "<component>/<section>"; keep only the
    # section part for the comparison.
    raw_section = control['Section']
    section_name = raw_section.split('/', 1)[-1]
    if section_name != "debug":
        return False
    return control.get("Auto-Built-Package") == "debug-symbols"