2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
43 import email as modemail
48 import daklib.config as config
49 import daklib.daksubprocess
50 from dbconn import DBConn, get_architecture, get_component, get_suite, \
51 get_override_type, Keyring, session_wrapper, \
52 get_active_keyring_paths, \
53 get_suite_architectures, get_or_set_metadatakey, DBSource, \
54 Component, Override, OverrideType
55 from sqlalchemy import desc
56 from dak_exceptions import *
57 from gpg import SignedFile
58 from textutils import fix_maintainer
59 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
60 re_multi_line_field, re_srchasver, re_taint_free, \
61 re_re_mark, re_whitespace_comment, re_issource, \
62 re_build_dep_arch, re_parse_maintainer
64 from formats import parse_format, validate_changes_format
65 from srcformats import get_format_from_string
66 from collections import defaultdict
68 ################################################################################
70 default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
72 alias_cache = None #: Cache for email alias checks
73 key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
75 # Monkeypatch commands.getstatusoutput as it may not return the correct exit
76 # code in lenny's Python. This also affects commands.getoutput and
78 def dak_getstatusoutput(cmd):
79 pipe = daklib.daksubprocess.Popen(cmd, shell=True, universal_newlines=True,
80 stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
82 output = pipe.stdout.read()
86 if output[-1:] == '\n':
94 commands.getstatusoutput = dak_getstatusoutput
96 ################################################################################
99 """ Escape html chars """
100 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
102 ################################################################################
104 def open_file(filename, mode='r'):
106 Open C{file}, return fileobject.
108 @type filename: string
109 @param filename: path/filename to open
112 @param mode: open mode
115 @return: open fileobject
117 @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
121 f = open(filename, mode)
123 raise CantOpenError(filename)
126 ################################################################################
128 def our_raw_input(prompt=""):
132 sys.stdout.write(prompt)
141 sys.stderr.write("\nUser interrupt (^D).\n")
144 ################################################################################
146 def extract_component_from_section(section, session=None):
149 if section.find('/') != -1:
150 component = section.split('/')[0]
152 # Expand default component
156 return (section, component)
158 ################################################################################
160 def parse_deb822(armored_contents, signing_rules=0, keyrings=None, session=None):
161 require_signature = True
164 require_signature = False
166 signed_file = SignedFile(armored_contents, keyrings=keyrings, require_signature=require_signature)
167 contents = signed_file.contents
172 # Split the lines in the input, keeping the linebreaks.
173 lines = contents.splitlines(True)
176 raise ParseChangesError("[Empty changes file]")
178 # Reindex by line number so we can easily verify the format of
184 indexed_lines[index] = line[:-1]
186 num_of_lines = len(indexed_lines.keys())
189 while index < num_of_lines:
191 line = indexed_lines[index]
192 if line == "" and signing_rules == 1:
193 if index != num_of_lines:
194 raise InvalidDscError(index)
196 slf = re_single_line_field.match(line)
198 field = slf.groups()[0].lower()
199 changes[field] = slf.groups()[1]
203 changes[field] += '\n'
205 mlf = re_multi_line_field.match(line)
208 raise ParseChangesError("'%s'\n [Multi-line field continuing on from nothing?]" % (line))
209 if first == 1 and changes[field] != "":
210 changes[field] += '\n'
212 changes[field] += mlf.groups()[0] + '\n'
216 changes["filecontents"] = armored_contents
218 if changes.has_key("source"):
219 # Strip the source version in brackets from the source field,
220 # put it in the "source-version" field instead.
221 srcver = re_srchasver.search(changes["source"])
223 changes["source"] = srcver.group(1)
224 changes["source-version"] = srcver.group(2)
227 raise ParseChangesError(error)
231 ################################################################################
233 def parse_changes(filename, signing_rules=0, dsc_file=0, keyrings=None):
235 Parses a changes file and returns a dictionary where each field is a
236 key. The mandatory first argument is the filename of the .changes
239 signing_rules is an optional argument:
241 - If signing_rules == -1, no signature is required.
242 - If signing_rules == 0 (the default), a signature is required.
243 - If signing_rules == 1, it turns on the same strict format checking
246 The rules for (signing_rules == 1)-mode are:
248 - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
249 followed by any PGP header data and must end with a blank line.
251 - The data section must end with a blank line and must be followed by
252 "-----BEGIN PGP SIGNATURE-----".
255 with open_file(filename) as changes_in:
256 content = changes_in.read()
258 unicode(content, 'utf-8')
260 raise ChangesUnicodeError("Changes file not proper utf-8")
261 changes = parse_deb822(content, signing_rules, keyrings=keyrings)
265 # Finally ensure that everything needed for .changes is there
266 must_keywords = ('Format', 'Date', 'Source', 'Binary', 'Architecture', 'Version',
267 'Distribution', 'Maintainer', 'Description', 'Changes', 'Files')
270 for keyword in must_keywords:
271 if not changes.has_key(keyword.lower()):
272 missingfields.append(keyword)
274 if len(missingfields):
275 raise ParseChangesError("Missing mandatory field(s) in changes file (policy 5.5): %s" % (missingfields))
279 ################################################################################
def hash_key(hashname):
    """Return the dict key ("<hash>sum") under which a checksum of type
    C{hashname} (e.g. "md5", "sha1") is stored in a file entry."""
    return "{0}sum".format(hashname)
284 ################################################################################
286 def check_dsc_files(dsc_filename, dsc, dsc_files):
288 Verify that the files listed in the Files field of the .dsc are
289 those expected given the announced Format.
291 @type dsc_filename: string
292 @param dsc_filename: path of .dsc file
295 @param dsc: the content of the .dsc parsed by C{parse_changes()}
297 @type dsc_files: dict
298 @param dsc_files: the file list returned by C{build_file_list()}
301 @return: all errors detected
305 # Ensure .dsc lists proper set of source files according to the format
307 has = defaultdict(lambda: 0)
310 (r'orig\.tar\.(gz|bz2|xz)\.asc', ('orig_tar_sig',)),
311 (r'orig\.tar\.gz', ('orig_tar_gz', 'orig_tar')),
312 (r'diff\.gz', ('debian_diff',)),
313 (r'tar\.gz', ('native_tar_gz', 'native_tar')),
314 (r'debian\.tar\.(gz|bz2|xz)', ('debian_tar',)),
315 (r'orig\.tar\.(gz|bz2|xz)', ('orig_tar',)),
316 (r'tar\.(gz|bz2|xz)', ('native_tar',)),
317 (r'orig-.+\.tar\.(gz|bz2|xz)\.asc', ('more_orig_tar_sig',)),
318 (r'orig-.+\.tar\.(gz|bz2|xz)', ('more_orig_tar',)),
322 m = re_issource.match(f)
324 rejmsg.append("%s: %s in Files field not recognised as source."
328 # Populate 'has' dictionary by resolving keys in lookup table
330 for regex, keys in ftype_lookup:
331 if re.match(regex, m.group(3)):
337 # File does not match anything in lookup table; reject
339 rejmsg.append("%s: unexpected source file '%s'" % (dsc_filename, f))
342 # Check for multiple files
343 for file_type in ('orig_tar', 'orig_tar_sig', 'native_tar', 'debian_tar', 'debian_diff'):
344 if has[file_type] > 1:
345 rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))
347 # Source format specific tests
349 format = get_format_from_string(dsc['format'])
351 '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)
354 except UnknownFormatError:
355 # Not an error here for now
360 ################################################################################
362 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
364 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
367 # Make sure we have a Files: field to parse...
368 if not changes.has_key(field):
369 raise NoFilesFieldError
371 # Validate .changes Format: field
373 validate_changes_format(parse_format(changes['format']), field)
375 includes_section = (not is_a_dsc) and field == "files"
377 # Parse each entry/line:
378 for i in changes[field].split('\n'):
382 section = priority = ""
385 (md5, size, section, priority, name) = s
387 (md5, size, name) = s
389 raise ParseChangesError(i)
396 (section, component) = extract_component_from_section(section)
398 files[name] = dict(size=size, section=section,
399 priority=priority, component=component)
400 files[name][hashname] = md5
404 ################################################################################
406 def send_mail (message, filename="", whitelists=None):
407 """sendmail wrapper, takes _either_ a message string or a file as arguments
409 @type whitelists: list of (str or None)
410 @param whitelists: path to whitelists. C{None} or an empty list whitelists
411 everything, otherwise an address is whitelisted if it is
412 included in any of the lists.
413 In addition a global whitelist can be specified in
414 Dinstall::MailWhiteList.
417 maildir = Cnf.get('Dir::Mail')
419 path = os.path.join(maildir, datetime.datetime.now().isoformat())
420 path = find_next_free(path)
421 with open(path, 'w') as fh:
424 # Check whether we're supposed to be sending mail
425 if Cnf.has_key("Dinstall::Options::No-Mail") and Cnf["Dinstall::Options::No-Mail"]:
428 # If we've been passed a string dump it into a temporary file
430 (fd, filename) = tempfile.mkstemp()
431 os.write (fd, message)
434 if whitelists is None or None in whitelists:
436 if Cnf.get('Dinstall::MailWhiteList', ''):
437 whitelists.append(Cnf['Dinstall::MailWhiteList'])
438 if len(whitelists) != 0:
439 with open_file(filename) as message_in:
440 message_raw = modemail.message_from_file(message_in)
443 for path in whitelists:
444 with open_file(path, 'r') as whitelist_in:
445 for line in whitelist_in:
446 if not re_whitespace_comment.match(line):
447 if re_re_mark.match(line):
448 whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
450 whitelist.append(re.compile(re.escape(line.strip())))
453 fields = ["To", "Bcc", "Cc"]
456 value = message_raw.get(field, None)
459 for item in value.split(","):
460 (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
466 if not mail_whitelisted:
467 print "Skipping {0} since it's not whitelisted".format(item)
471 # Doesn't have any mail in whitelist so remove the header
473 del message_raw[field]
475 message_raw.replace_header(field, ', '.join(match))
477 # Change message fields in order if we don't have a To header
478 if not message_raw.has_key("To"):
481 if message_raw.has_key(field):
482 message_raw[fields[-1]] = message_raw[field]
483 del message_raw[field]
486 # Clean up any temporary files
487 # and return, as we removed all recipients.
489 os.unlink (filename);
492 fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0o700);
493 os.write (fd, message_raw.as_string(True));
497 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
499 raise SendmailFailedError(output)
501 # Clean up any temporary files
505 ################################################################################
def poolify (source, component=None):
    """Return the pool subdirectory ("prefix/source/") for a source package.

    Library packages ("lib*") are pooled under their first four letters,
    everything else under the first letter.  C{component} is accepted for
    backward compatibility with older callers but is not used.
    """
    if source.startswith("lib"):
        prefix = source[:4]
    else:
        prefix = source[:1]
    return prefix + '/' + source + '/'
513 ################################################################################
515 def move (src, dest, overwrite = 0, perms = 0o664):
516 if os.path.exists(dest) and os.path.isdir(dest):
519 dest_dir = os.path.dirname(dest)
520 if not os.path.lexists(dest_dir):
521 umask = os.umask(00000)
522 os.makedirs(dest_dir, 0o2775)
524 #print "Moving %s to %s..." % (src, dest)
525 if os.path.exists(dest) and os.path.isdir(dest):
526 dest += '/' + os.path.basename(src)
527 # Don't overwrite unless forced to
528 if os.path.lexists(dest):
530 fubar("Can't move %s to %s - file already exists." % (src, dest))
532 if not os.access(dest, os.W_OK):
533 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
534 shutil.copy2(src, dest)
535 os.chmod(dest, perms)
538 def copy (src, dest, overwrite = 0, perms = 0o664):
539 if os.path.exists(dest) and os.path.isdir(dest):
542 dest_dir = os.path.dirname(dest)
543 if not os.path.exists(dest_dir):
544 umask = os.umask(00000)
545 os.makedirs(dest_dir, 0o2775)
547 #print "Copying %s to %s..." % (src, dest)
548 if os.path.exists(dest) and os.path.isdir(dest):
549 dest += '/' + os.path.basename(src)
550 # Don't overwrite unless forced to
551 if os.path.lexists(dest):
553 raise FileExistsError
555 if not os.access(dest, os.W_OK):
556 raise CantOverwriteError
557 shutil.copy2(src, dest)
558 os.chmod(dest, perms)
560 ################################################################################
def which_conf_file ():
    """
    Return the path of the dak configuration file to use.

    Order of preference: the DAK_CONFIG environment variable, then the
    host-specific Config::<fqdn>::DakConfig entry, then default_config.
    If the host allows per-user configuration
    (Config::<fqdn>::AllowLocalConfig), $HOME/etc/dak.conf is merged
    into Cnf first.
    """
    if os.getenv('DAK_CONFIG'):
        return os.getenv('DAK_CONFIG')

    res = socket.getfqdn()
    # In case we allow local config files per user, try if one exists
    if Cnf.find_b("Config::" + res + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # Bug fix: joining with the absolute path "/etc/dak.conf" made
        # os.path.join discard homedir entirely, so the "per-user" file
        # actually tested was the system-wide /etc/dak.conf.  Join with a
        # relative path so the lookup really happens under $HOME.
        confpath = os.path.join(homedir, "etc/dak.conf")
        if os.path.exists(confpath):
            apt_pkg.read_config_file_isc(Cnf, confpath)

    # We are still in here, so there is no local config file or we do
    # not allow local files. Do the normal stuff.
    if Cnf.get("Config::" + res + "::DakConfig"):
        return Cnf["Config::" + res + "::DakConfig"]

    return default_config
581 ################################################################################
def TemplateSubst(subst_map, filename):
    """Read the template C{filename} and replace every key of C{subst_map}
    occurring in it with the str() of the corresponding value."""
    with open_file(filename) as template_file:
        text = template_file.read()
    for placeholder, value in subst_map.iteritems():
        text = text.replace(placeholder, str(value))
    return text
591 ################################################################################
593 def fubar(msg, exit_code=1):
594 sys.stderr.write("E: %s\n" % (msg))
598 sys.stderr.write("W: %s\n" % (msg))
600 ################################################################################
602 # Returns the user name with a laughable attempt at rfc822 conformancy
603 # (read: removing stray periods).
605 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
608 return pwd.getpwuid(os.getuid())[0]
610 ################################################################################
620 return ("%d%s" % (c, t))
622 ################################################################################
def find_next_free (dest, too_many=100):
    """Return C{dest}, or C{dest} with a numeric suffix appended, such that
    the returned path does not yet exist.

    @raise NoFreeFilenameError: if no unused name is found within
                                C{too_many} attempts
    """
    base = dest
    attempt = 0
    while os.path.lexists(dest) and attempt < too_many:
        dest = base + '.' + repr(attempt)
        attempt += 1
    if attempt >= too_many:
        raise NoFreeFilenameError
    return dest
634 ################################################################################
def result_join (original, sep = '\t'):
    """Join the items of C{original} with C{sep}, rendering None items as
    the empty string."""
    pieces = ["" if item == None else item for item in original]
    return sep.join(pieces)
645 ################################################################################
647 def prefix_multi_line_string(str, prefix, include_blank_lines=0):
649 for line in str.split('\n'):
651 if line or include_blank_lines:
652 out += "%s%s\n" % (prefix, line)
653 # Strip trailing new line
658 ################################################################################
def join_with_commas_and(list):
    """Render a list as English prose: "a, b and c" (empty -> "nothing")."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    return ", ".join(list[:-1]) + " and " + list[-1]
665 ################################################################################
670 (pkg, version, constraint) = atom
672 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
675 pp_deps.append(pp_dep)
676 return " |".join(pp_deps)
678 ################################################################################
683 ################################################################################
685 def parse_args(Options):
686 """ Handle -a, -c and -s arguments; returns them as SQL constraints """
687 # XXX: This should go away and everything which calls it be converted
688 # to use SQLA properly. For now, we'll just fix it not to use
689 # the old Pg interface though
690 session = DBConn().session()
694 for suitename in split_args(Options["Suite"]):
695 suite = get_suite(suitename, session=session)
696 if not suite or suite.suite_id is None:
697 warn("suite '%s' not recognised." % (suite and suite.suite_name or suitename))
699 suite_ids_list.append(suite.suite_id)
701 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
703 fubar("No valid suite given.")
708 if Options["Component"]:
709 component_ids_list = []
710 for componentname in split_args(Options["Component"]):
711 component = get_component(componentname, session=session)
712 if component is None:
713 warn("component '%s' not recognised." % (componentname))
715 component_ids_list.append(component.component_id)
716 if component_ids_list:
717 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
719 fubar("No valid component given.")
723 # Process architecture
724 con_architectures = ""
726 if Options["Architecture"]:
728 for archname in split_args(Options["Architecture"]):
729 if archname == "source":
732 arch = get_architecture(archname, session=session)
734 warn("architecture '%s' not recognised." % (archname))
736 arch_ids_list.append(arch.arch_id)
738 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
741 fubar("No valid architecture given.")
745 return (con_suites, con_architectures, con_components, check_source)
747 ################################################################################
749 def arch_compare_sw (a, b):
751 Function for use in sorting lists of architectures.
753 Sorts normally except that 'source' dominates all others.
756 if a == "source" and b == "source":
765 ################################################################################
def split_args (s, dwim=True):
    """
    Split command line arguments which can be separated by either commas
    or whitespace.  If C{dwim} is set, complain about a string ending in a
    comma, since that usually means someone wrote 'dak ls -a i386, m68k
    foo' and the resulting confusion of 'm68k' being treated as an
    argument is undesirable.
    """
    if "," not in s:
        return s.split()
    if dwim and s.endswith(","):
        fubar("split_args: found trailing comma, spurious space maybe?")
    return s.split(",")
783 ################################################################################
def gpg_keyring_args(keyrings=None):
    """Build a gpg command line fragment selecting C{keyrings} (defaulting
    to the active keyring paths from the database) via --keyring options."""
    if not keyrings:
        keyrings = get_active_keyring_paths()
    return " ".join("--keyring %s" % path for path in keyrings)
791 ################################################################################
793 def gpg_get_key_addresses(fingerprint):
794 """retreive email addresses from gpg key uids for a given fingerprint"""
795 addresses = key_uid_email_cache.get(fingerprint)
796 if addresses != None:
800 with open(os.devnull, "wb") as devnull:
801 output = daklib.daksubprocess.check_output(
802 ["gpg", "--no-default-keyring"] + gpg_keyring_args().split() +
803 ["--with-colons", "--list-keys", fingerprint], stderr=devnull)
804 except subprocess.CalledProcessError:
807 for l in output.split('\n'):
809 if parts[0] not in ("uid", "pub"):
816 # Do not use unicode_escape, because it is locale-specific
817 uid = codecs.decode(uid, "string_escape").decode("utf-8")
818 except UnicodeDecodeError:
819 uid = uid.decode("latin1") # does not fail
820 m = re_parse_maintainer.match(uid)
824 address = address.encode("utf8") # dak still uses bytes
825 if address.endswith('@debian.org'):
826 # prefer @debian.org addresses
827 # TODO: maybe not hardcode the domain
828 addresses.insert(0, address)
830 addresses.append(address)
831 key_uid_email_cache[fingerprint] = addresses
834 ################################################################################
836 def get_logins_from_ldap(fingerprint='*'):
837 """retrieve login from LDAP linked to a given fingerprint"""
839 LDAPDn = Cnf['Import-LDAP-Fingerprints::LDAPDn']
840 LDAPServer = Cnf['Import-LDAP-Fingerprints::LDAPServer']
841 l = ldap.open(LDAPServer)
842 l.simple_bind_s('','')
843 Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
844 '(keyfingerprint=%s)' % fingerprint,
845 ['uid', 'keyfingerprint'])
848 login[elem[1]['keyFingerPrint'][0]] = elem[1]['uid'][0]
851 ################################################################################
853 def get_users_from_ldap():
854 """retrieve login and user names from LDAP"""
856 LDAPDn = Cnf['Import-LDAP-Fingerprints::LDAPDn']
857 LDAPServer = Cnf['Import-LDAP-Fingerprints::LDAPServer']
858 l = ldap.open(LDAPServer)
859 l.simple_bind_s('','')
860 Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
861 '(uid=*)', ['uid', 'cn', 'mn', 'sn'])
866 for k in ('cn', 'mn', 'sn'):
868 if elem[k][0] != '-':
869 name.append(elem[k][0])
872 users[' '.join(name)] = elem['uid'][0]
875 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
    Returns fixed 'src'.
    """
    src = src.replace(root, '', 1)
    dest = dest.replace(root, '', 1)
    # Climb out of dest's directory: one "../" per path component.
    parent = os.path.dirname(dest)
    new_src = '../' * len(parent.split('/'))
    return new_src + src
888 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix="", mode=None, group=None):
    """
    Return a secure and unique filename by pre-creating it.

    @type directory: str
    @param directory: If non-null it will be the directory the file is pre-created in.

    @type prefix: str
    @param prefix: The filename will be prefixed with this string

    @type suffix: str
    @param suffix: The filename will end with this string

    @type mode: str
    @param mode: If set the file will get chmodded to those permissions

    @type group: str
    @param group: If set the file will get chgrped to the specified group.

    @rtype: list
    @return: Returns a pair (fd, name)
    """
    (tfd, tfname) = tempfile.mkstemp(suffix, prefix, directory)
    if mode:
        os.chmod(tfname, mode)
    if group:
        os.chown(tfname, -1, grp.getgrnam(group).gr_gid)
    return (tfd, tfname)
921 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix="", mode=None, group=None):
    """
    Return a secure and unique directory by pre-creating it.

    @type parent: str
    @param parent: If non-null it will be the directory the directory is pre-created in.

    @type prefix: str
    @param prefix: The directory name will be prefixed with this string

    @type suffix: str
    @param suffix: The directory name will end with this string

    @type mode: str
    @param mode: If set the directory will get chmodded to those permissions

    @type group: str
    @param group: If set the directory will get chgrped to the specified group.

    @rtype: str
    @return: Returns the name of the created directory
    """
    tfname = tempfile.mkdtemp(suffix, prefix, parent)
    if mode:
        os.chmod(tfname, mode)
    if group:
        os.chown(tfname, -1, grp.getgrnam(group).gr_gid)
    return tfname
955 ################################################################################
957 def is_email_alias(email):
958 """ checks if the user part of the email is listed in the alias file """
960 if alias_cache == None:
961 aliasfn = which_alias_file()
964 for l in open(aliasfn):
965 alias_cache.add(l.split(':')[0])
966 uid = email.split('@')[0]
967 return uid in alias_cache
969 ################################################################################
971 def get_changes_files(from_dir):
973 Takes a directory and lists all .changes files in it (as well as chdir'ing
974 to the directory; this is due to broken behaviour on the part of p-u/p-a
975 when you're not in the right place)
977 Returns a list of filenames
980 # Much of the rest of p-u/p-a depends on being in the right place
982 changes_files = [x for x in os.listdir(from_dir) if x.endswith('.changes')]
984 fubar("Failed to read list from directory %s (%s)" % (from_dir, e))
988 ################################################################################
990 Cnf = config.Config().Cnf
992 ################################################################################
994 def parse_wnpp_bug_file(file = "/srv/ftp-master.debian.org/scripts/masterfiles/wnpp_rm"):
996 Parses the wnpp bug list available at https://qa.debian.org/data/bts/wnpp_rm
997 Well, actually it parsed a local copy, but let's document the source
1000 returns a dict associating source package name with a list of open wnpp
1001 bugs (Yes, there might be more than one)
1007 lines = f.readlines()
1008 except IOError as e:
1009 print "Warning: Couldn't open %s; don't know about WNPP bugs, so won't close any." % file
1014 splited_line = line.split(": ", 1)
1015 if len(splited_line) > 1:
1016 wnpp[splited_line[0]] = splited_line[1].split("|")
1018 for source in wnpp.keys():
1020 for wnpp_bug in wnpp[source]:
1021 bug_no = re.search("(\d)+", wnpp_bug).group()
1027 ################################################################################
1029 def get_packages_from_ftp(root, suite, component, architecture):
1031 Returns an object containing apt_pkg-parseable data collected by
1032 aggregating Packages.gz files gathered for each architecture.
1035 @param root: path to ftp archive root directory
1038 @param suite: suite to extract files from
1040 @type component: string
1041 @param component: component to extract files from
1043 @type architecture: string
1044 @param architecture: architecture to extract files from
1047 @return: apt_pkg class containing package data
1049 filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (root, suite, component, architecture)
1050 (fd, temp_file) = temp_filename()
1051 (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_file))
1053 fubar("Gunzip invocation failed!\n%s\n" % (output), result)
1054 filename = "%s/dists/%s/%s/debian-installer/binary-%s/Packages.gz" % (root, suite, component, architecture)
1055 if os.path.exists(filename):
1056 (result, output) = commands.getstatusoutput("gunzip -c %s >> %s" % (filename, temp_file))
1058 fubar("Gunzip invocation failed!\n%s\n" % (output), result)
1059 packages = open_file(temp_file)
1060 Packages = apt_pkg.TagFile(packages)
1061 os.unlink(temp_file)
1064 ################################################################################
def deb_extract_control(fh):
    """Return the raw DEBIAN/control member of the binary package open at C{fh}."""
    deb = apt_inst.DebFile(fh)
    return deb.control.extractdata("control")
1070 ################################################################################
def mail_addresses_for_upload(maintainer, changed_by, fingerprint):
    """mail addresses to contact for an upload

    @type  maintainer: str
    @param maintainer: Maintainer field of the .changes file

    @type  changed_by: str
    @param changed_by: Changed-By field of the .changes file

    @type  fingerprint: str
    @param fingerprint: fingerprint of the key used to sign the upload

    @rtype:  list of str
    @return: list of RFC 2047-encoded mail addresses to contact regarding
             the upload
    """
    recipients = [maintainer]
    if changed_by != maintainer:
        recipients.append(changed_by)

    key_addresses = gpg_get_key_addresses(fingerprint)
    if len(key_addresses) > 0:
        # Add the signer's preferred address only when neither Maintainer
        # nor Changed-By already matches one of the key's uids.
        changed_by_mail = fix_maintainer(changed_by)[3]
        maintainer_mail = fix_maintainer(maintainer)[3]
        if changed_by_mail not in key_addresses and maintainer_mail not in key_addresses:
            recipients.append(key_addresses[0])

    return [fix_maintainer(entry)[1] for entry in recipients]
1099 ################################################################################
1101 def call_editor(text="", suffix=".txt"):
1102 """run editor and return the result as a string
1105 @param text: initial text
1108 @param suffix: extension for temporary file
1111 @return: string with the edited text
1113 editor = os.environ.get('VISUAL', os.environ.get('EDITOR', 'vi'))
1114 tmp = tempfile.NamedTemporaryFile(suffix=suffix, delete=False)
1118 daklib.daksubprocess.check_call([editor, tmp.name])
1119 return open(tmp.name, 'r').read()
1123 ################################################################################
1125 def check_reverse_depends(removals, suite, arches=None, session=None, cruft=False, quiet=False, include_arch_all=True):
1126 dbsuite = get_suite(suite, session)
1127 overridesuite = dbsuite
1128 if dbsuite.overridesuite is not None:
1129 overridesuite = get_suite(dbsuite.overridesuite, session)
1132 all_broken = defaultdict(lambda: defaultdict(set))
1134 all_arches = set(arches)
1136 all_arches = set(x.arch_string for x in get_suite_architectures(suite))
1137 all_arches -= set(["source", "all"])
1138 removal_set = set(removals)
1139 metakey_d = get_or_set_metadatakey("Depends", session)
1140 metakey_p = get_or_set_metadatakey("Provides", session)
1142 'suite_id': dbsuite.suite_id,
1143 'metakey_d_id': metakey_d.key_id,
1144 'metakey_p_id': metakey_p.key_id,
1146 if include_arch_all:
1147 rdep_architectures = all_arches | set(['all'])
1149 rdep_architectures = all_arches
1150 for architecture in rdep_architectures:
1153 virtual_packages = {}
1154 params['arch_id'] = get_architecture(architecture, session).arch_id
1157 SELECT b.package, s.source, c.name as component,
1158 (SELECT bmd.value FROM binaries_metadata bmd WHERE bmd.bin_id = b.id AND bmd.key_id = :metakey_d_id) AS depends,
1159 (SELECT bmp.value FROM binaries_metadata bmp WHERE bmp.bin_id = b.id AND bmp.key_id = :metakey_p_id) AS provides
1161 JOIN bin_associations ba ON b.id = ba.bin AND ba.suite = :suite_id
1162 JOIN source s ON b.source = s.id
1163 JOIN files_archive_map af ON b.file = af.file_id
1164 JOIN component c ON af.component_id = c.id
1165 WHERE b.architecture = :arch_id'''
1166 query = session.query('package', 'source', 'component', 'depends', 'provides'). \
1167 from_statement(statement).params(params)
1168 for package, source, component, depends, provides in query:
1169 sources[package] = source
1170 p2c[package] = component
1171 if depends is not None:
1172 deps[package] = depends
1173 # Maintain a counter for each virtual package. If a
1174 # Provides: exists, set the counter to 0 and count all
1175 # provides by a package not in the list for removal.
1176 # If the counter stays 0 at the end, we know that only
1177 # the to-be-removed packages provided this virtual
1179 if provides is not None:
1180 for virtual_pkg in provides.split(","):
1181 virtual_pkg = virtual_pkg.strip()
1182 if virtual_pkg == package: continue
1183 if not virtual_packages.has_key(virtual_pkg):
1184 virtual_packages[virtual_pkg] = 0
1185 if package not in removals:
1186 virtual_packages[virtual_pkg] += 1
1188 # If a virtual package is only provided by the to-be-removed
1189 # packages, treat the virtual package as to-be-removed too.
1190 removal_set.update(virtual_pkg for virtual_pkg in virtual_packages if not virtual_packages[virtual_pkg])
1192 # Check binary dependencies (Depends)
1193 for package in deps:
1194 if package in removals: continue
1196 parsed_dep = apt_pkg.parse_depends(deps[package])
1197 except ValueError as e:
1198 print "Error for package %s: %s" % (package, e)
1200 for dep in parsed_dep:
1201 # Check for partial breakage. If a package has a ORed
1202 # dependency, there is only a dependency problem if all
1203 # packages in the ORed depends will be removed.
1205 for dep_package, _, _ in dep:
1206 if dep_package in removals:
1208 if unsat == len(dep):
1209 component = p2c[package]
1210 source = sources[package]
1211 if component != "main":
1212 source = "%s/%s" % (source, component)
1213 all_broken[source][package].add(architecture)
1216 if all_broken and not quiet:
1218 print " - broken Depends:"
1220 print "# Broken Depends:"
1221 for source, bindict in sorted(all_broken.items()):
1223 for binary, arches in sorted(bindict.items()):
1224 if arches == all_arches or 'all' in arches:
1225 lines.append(binary)
1227 lines.append('%s [%s]' % (binary, ' '.join(sorted(arches))))
1229 print ' %s: %s' % (source, lines[0])
1231 print '%s: %s' % (source, lines[0])
1232 for line in lines[1:]:
1234 print ' ' + ' ' * (len(source) + 2) + line
1236 print ' ' * (len(source) + 2) + line
1240 # Check source dependencies (Build-Depends and Build-Depends-Indep)
1241 all_broken = defaultdict(set)
1242 metakey_bd = get_or_set_metadatakey("Build-Depends", session)
1243 metakey_bdi = get_or_set_metadatakey("Build-Depends-Indep", session)
1244 if include_arch_all:
1245 metakey_ids = (metakey_bd.key_id, metakey_bdi.key_id)
1247 metakey_ids = (metakey_bd.key_id,)
1250 'suite_id': dbsuite.suite_id,
1251 'metakey_ids': metakey_ids,
1254 SELECT s.source, string_agg(sm.value, ', ') as build_dep
1256 JOIN source_metadata sm ON s.id = sm.src_id
1258 (SELECT src FROM newest_src_association
1259 WHERE suite = :suite_id)
1260 AND sm.key_id in :metakey_ids
1261 GROUP BY s.id, s.source'''
1262 query = session.query('source', 'build_dep').from_statement(statement). \
1264 for source, build_dep in query:
1265 if source in removals: continue
1267 if build_dep is not None:
1268 # Remove [arch] information since we want to see breakage on all arches
1269 build_dep = re_build_dep_arch.sub("", build_dep)
1271 parsed_dep = apt_pkg.parse_src_depends(build_dep)
1272 except ValueError as e:
1273 print "Error for source %s: %s" % (source, e)
1274 for dep in parsed_dep:
1276 for dep_package, _, _ in dep:
1277 if dep_package in removals:
1279 if unsat == len(dep):
1280 component, = session.query(Component.component_name) \
1281 .join(Component.overrides) \
1282 .filter(Override.suite == overridesuite) \
1283 .filter(Override.package == re.sub('/(contrib|non-free)$', '', source)) \
1284 .join(Override.overridetype).filter(OverrideType.overridetype == 'dsc') \
1287 if component != "main":
1288 key = "%s/%s" % (source, component)
1289 all_broken[key].add(pp_deps(dep))
1292 if all_broken and not quiet:
1294 print " - broken Build-Depends:"
1296 print "# Broken Build-Depends:"
1297 for source, bdeps in sorted(all_broken.items()):
1298 bdeps = sorted(bdeps)
1300 print ' %s: %s' % (source, bdeps[0])
1302 print '%s: %s' % (source, bdeps[0])
1303 for bdep in bdeps[1:]:
1305 print ' ' + ' ' * (len(source) + 2) + bdep
1307 print ' ' * (len(source) + 2) + bdep
1313 ################################################################################
def parse_built_using(control):
    """source packages referenced via Built-Using

    @type control: dict-like
    @param control: control file to take Built-Using field from

    @rtype: list of (str, str)
    @return: list of (source_name, source_version) pairs
    """
    built_using = control.get('Built-Using', None)
    if built_using is None:
        # No Built-Using field present: nothing is referenced.
        return []

    bu = []
    for dep in apt_pkg.parse_depends(built_using):
        # Built-Using entries must be of the form "src (= version)":
        # no alternatives, and only strict "=" relations are allowed.
        assert len(dep) == 1, 'Alternatives are not allowed in Built-Using field'
        source_name, source_version, comp = dep[0]
        assert comp == '=', 'Built-Using must contain strict dependencies'
        bu.append((source_name, source_version))

    return bu
1337 ################################################################################
def is_in_debug_section(control):
    """binary package is a debug package

    @type control: dict-like
    @param control: control file of binary package

    @rtype: bool
    @return: True if the binary package is a debug package
    """
    # Strip any component prefix ("main/debug" -> "debug"); a bare
    # "debug" section has no "/" and [-1] leaves it unchanged.
    section = control['Section'].split('/', 1)[-1]
    auto_built_package = control.get("Auto-Built-Package")
    # Only automatically built debug-symbol packages count as debug packages.
    return section == "debug" and auto_built_package == "debug-symbols"