2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
43 import email as modemail
48 import daklib.config as config
49 import daklib.daksubprocess
50 from dbconn import DBConn, get_architecture, get_component, get_suite, \
51 get_override_type, Keyring, session_wrapper, \
52 get_active_keyring_paths, get_primary_keyring_path, \
53 get_suite_architectures, get_or_set_metadatakey, DBSource, \
54 Component, Override, OverrideType
55 from sqlalchemy import desc
56 from dak_exceptions import *
57 from gpg import SignedFile
58 from textutils import fix_maintainer
59 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
60 re_multi_line_field, re_srchasver, re_taint_free, \
61 re_re_mark, re_whitespace_comment, re_issource, \
62 re_is_orig_source, re_build_dep_arch, re_parse_maintainer
64 from formats import parse_format, validate_changes_format
65 from srcformats import get_format_from_string
66 from collections import defaultdict
68 ################################################################################
70 default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
72 alias_cache = None #: Cache for email alias checks
73 key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
75 # (hashname, function, earliest_changes_version)
76 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
77 ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
79 # Monkeypatch commands.getstatusoutput as it may not return the correct exit
80 # code in lenny's Python. This also affects commands.getoutput and
# Run `cmd` through a shell, merging stderr into stdout (replacement for
# commands.getstatusoutput).
82 def dak_getstatusoutput(cmd):
83 pipe = daklib.daksubprocess.Popen(cmd, shell=True, universal_newlines=True,
84 stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
# Read the child's entire combined output before collecting its status.
86 output = pipe.stdout.read()
# Strip a single trailing newline, mirroring commands.getstatusoutput.
90 if output[-1:] == '\n':
# NOTE(review): lines between the read and the monkeypatch (wait()/return
# handling) are missing from this view — confirm against upstream
# daklib/utils.py before editing.
98 commands.getstatusoutput = dak_getstatusoutput
100 ################################################################################
# Fragment of html_escape(s, ...); the def line is not visible in this view.
103 """ Escape html chars """
# Replace each escapable character with its entity via the html_escaping
# lookup table (keyed by the matched character).
104 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
106 ################################################################################
def open_file(filename, mode='r'):
    """
    Open C{filename}, return fileobject.

    @type filename: string
    @param filename: path/filename to open

    @type mode: string
    @param mode: open mode

    @rtype: fileobject
    @return: open fileobject

    @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
    """
    try:
        f = open(filename, mode)
    except IOError:
        # Re-raise as the project-specific exception so callers can treat
        # "could not open" uniformly.
        raise CantOpenError(filename)
    return f
130 ################################################################################
# Prompt on stdout and read one line from stdin; exits on EOF (^D).
132 def our_raw_input(prompt=""):
136 sys.stdout.write(prompt)
# NOTE(review): the flush/raw_input()/EOFError handling between these lines
# is not visible in this view; confirm against upstream before editing.
145 sys.stderr.write("\nUser interrupt (^D).\n")
148 ################################################################################
# Split a "component/section" Section field into (section, component);
# a bare section presumably implies the default component — TODO confirm
# against the elided branch.
150 def extract_component_from_section(section, session=None):
153 if section.find('/') != -1:
# Everything before the first '/' names the component.
154 component = section.split('/')[0]
156 # Expand default component
# NOTE(review): the default-component branch is missing from this view.
160 return (section, component)
162 ################################################################################
# Parse (optionally PGP-signed) RFC-822-style control data into a dict of
# lowercased field name -> value. signing_rules == -1 disables the signature
# requirement; see parse_changes() for the other signing_rules modes.
# NOTE(review): several interior lines (loop setup, continue/else branches)
# are missing from this view — confirm against upstream daklib/utils.py
# before changing any logic here.
164 def parse_deb822(armored_contents, signing_rules=0, keyrings=None, session=None):
165 require_signature = True
166 require_signature = False
170 signed_file = SignedFile(armored_contents, keyrings=keyrings, require_signature=require_signature)
235 ################################################################################
# Parse a .changes (or .dsc) file; delegates field parsing to parse_deb822()
# after validating the content is UTF-8, then enforces the mandatory
# .changes fields (Debian policy 5.5).
237 def parse_changes(filename, signing_rules=0, dsc_file=0, keyrings=None):
239 Parses a changes file and returns a dictionary where each field is a
240 key. The mandatory first argument is the filename of the .changes
243 signing_rules is an optional argument:
245 - If signing_rules == -1, no signature is required.
246 - If signing_rules == 0 (the default), a signature is required.
247 - If signing_rules == 1, it turns on the same strict format checking
250 The rules for (signing_rules == 1)-mode are:
252 - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
253 followed by any PGP header data and must end with a blank line.
255 - The data section must end with a blank line and must be followed by
256 "-----BEGIN PGP SIGNATURE-----".
259 with open_file(filename) as changes_in:
260 content = changes_in.read()
# Validate encoding only; the decoded result is deliberately discarded.
262 unicode(content, 'utf-8')
264 raise ChangesUnicodeError("Changes file not proper utf-8")
265 changes = parse_deb822(content, signing_rules, keyrings=keyrings)
269 # Finally ensure that everything needed for .changes is there
270 must_keywords = ('Format', 'Date', 'Source', 'Binary', 'Architecture', 'Version',
271 'Distribution', 'Maintainer', 'Description', 'Changes', 'Files')
# parse_deb822() lowercases field names, hence keyword.lower() here.
274 for keyword in must_keywords:
275 if not changes.has_key(keyword.lower()):
276 missingfields.append(keyword)
278 if len(missingfields):
# NOTE(review): "mandantory" is a typo in a runtime error string; left as-is
# here since changing runtime strings is out of scope for a comment pass.
279 raise ParseChangesError("Missing mandantory field(s) in changes file (policy 5.5): %s" % (missingfields))
283 ################################################################################
def hash_key(hashname):
    """Return the changes-dict key used for a hash name (e.g. 'md5' -> 'md5sum')."""
    return '{0}sum'.format(hashname)
288 ################################################################################
290 def check_dsc_files(dsc_filename, dsc, dsc_files):
292 Verify that the files listed in the Files field of the .dsc are
293 those expected given the announced Format.
295 @type dsc_filename: string
296 @param dsc_filename: path of .dsc file
299 @param dsc: the content of the .dsc parsed by C{parse_changes()}
301 @type dsc_files: dict
302 @param dsc_files: the file list returned by C{build_file_list()}
305 @return: all errors detected
309 # Ensure .dsc lists proper set of source files according to the format
311 has = defaultdict(lambda: 0)
314 (r'orig.tar.gz', ('orig_tar_gz', 'orig_tar')),
315 (r'diff.gz', ('debian_diff',)),
316 (r'tar.gz', ('native_tar_gz', 'native_tar')),
317 (r'debian\.tar\.(gz|bz2|xz)', ('debian_tar',)),
318 (r'orig\.tar\.(gz|bz2|xz)', ('orig_tar',)),
319 (r'tar\.(gz|bz2|xz)', ('native_tar',)),
320 (r'orig-.+\.tar\.(gz|bz2|xz)', ('more_orig_tar',)),
324 m = re_issource.match(f)
326 rejmsg.append("%s: %s in Files field not recognised as source."
330 # Populate 'has' dictionary by resolving keys in lookup table
332 for regex, keys in ftype_lookup:
333 if re.match(regex, m.group(3)):
339 # File does not match anything in lookup table; reject
341 reject("%s: unexpected source file '%s'" % (dsc_filename, f))
343 # Check for multiple files
344 for file_type in ('orig_tar', 'native_tar', 'debian_tar', 'debian_diff'):
345 if has[file_type] > 1:
346 rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))
348 # Source format specific tests
350 format = get_format_from_string(dsc['format'])
352 '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)
355 except UnknownFormatError:
356 # Not an error here for now
361 ################################################################################
363 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
365 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
368 # Make sure we have a Files: field to parse...
369 if not changes.has_key(field):
370 raise NoFilesFieldError
372 # Validate .changes Format: field
374 validate_changes_format(parse_format(changes['format']), field)
376 includes_section = (not is_a_dsc) and field == "files"
378 # Parse each entry/line:
379 for i in changes[field].split('\n'):
383 section = priority = ""
386 (md5, size, section, priority, name) = s
388 (md5, size, name) = s
390 raise ParseChangesError(i)
397 (section, component) = extract_component_from_section(section)
399 files[name] = dict(size=size, section=section,
400 priority=priority, component=component)
401 files[name][hashname] = md5
405 ################################################################################
407 def send_mail (message, filename="", whitelists=None):
408 """sendmail wrapper, takes _either_ a message string or a file as arguments
410 @type whitelists: list of (str or None)
411 @param whitelists: path to whitelists. C{None} or an empty list whitelists
412 everything, otherwise an address is whitelisted if it is
413 included in any of the lists.
414 In addition a global whitelist can be specified in
415 Dinstall::MailWhiteList.
418 maildir = Cnf.get('Dir::Mail')
420 path = os.path.join(maildir, datetime.datetime.now().isoformat())
421 path = find_next_free(path)
422 with open(path, 'w') as fh:
425 # Check whether we're supposed to be sending mail
426 if Cnf.has_key("Dinstall::Options::No-Mail") and Cnf["Dinstall::Options::No-Mail"]:
429 # If we've been passed a string dump it into a temporary file
431 (fd, filename) = tempfile.mkstemp()
432 os.write (fd, message)
435 if whitelists is None or None in whitelists:
437 if Cnf.get('Dinstall::MailWhiteList', ''):
438 whitelists.append(Cnf['Dinstall::MailWhiteList'])
439 if len(whitelists) != 0:
440 with open_file(filename) as message_in:
441 message_raw = modemail.message_from_file(message_in)
444 for path in whitelists:
445 with open_file(path, 'r') as whitelist_in:
446 for line in whitelist_in:
447 if not re_whitespace_comment.match(line):
448 if re_re_mark.match(line):
449 whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
451 whitelist.append(re.compile(re.escape(line.strip())))
454 fields = ["To", "Bcc", "Cc"]
457 value = message_raw.get(field, None)
460 for item in value.split(","):
461 (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
467 if not mail_whitelisted:
468 print "Skipping {0} since it's not whitelisted".format(item)
472 # Doesn't have any mail in whitelist so remove the header
474 del message_raw[field]
476 message_raw.replace_header(field, ', '.join(match))
478 # Change message fields in order if we don't have a To header
479 if not message_raw.has_key("To"):
482 if message_raw.has_key(field):
483 message_raw[fields[-1]] = message_raw[field]
484 del message_raw[field]
487 # Clean up any temporary files
488 # and return, as we removed all recipients.
490 os.unlink (filename);
493 fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0o700);
494 os.write (fd, message_raw.as_string(True));
498 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
500 raise SendmailFailedError(output)
502 # Clean up any temporary files
506 ################################################################################
def poolify(source, component=None):
    """
    Return the pool subdirectory for a source package.

    Packages named lib* are pooled under their first four characters
    (libfoo -> 'libf/libfoo/'); everything else under the first character
    (bar -> 'b/bar/').

    @type source: string
    @param source: source package name

    @param component: unused; kept for backward compatibility with callers.

    @rtype: string
    @return: pool path fragment ending with '/'
    """
    if source.startswith("lib"):
        return source[:4] + '/' + source + '/'
    return source[:1] + '/' + source + '/'
514 ################################################################################
516 def move (src, dest, overwrite = 0, perms = 0o664):
517 if os.path.exists(dest) and os.path.isdir(dest):
520 dest_dir = os.path.dirname(dest)
521 if not os.path.lexists(dest_dir):
522 umask = os.umask(00000)
523 os.makedirs(dest_dir, 0o2775)
525 #print "Moving %s to %s..." % (src, dest)
526 if os.path.exists(dest) and os.path.isdir(dest):
527 dest += '/' + os.path.basename(src)
528 # Don't overwrite unless forced to
529 if os.path.lexists(dest):
531 fubar("Can't move %s to %s - file already exists." % (src, dest))
533 if not os.access(dest, os.W_OK):
534 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
535 shutil.copy2(src, dest)
536 os.chmod(dest, perms)
539 def copy (src, dest, overwrite = 0, perms = 0o664):
540 if os.path.exists(dest) and os.path.isdir(dest):
543 dest_dir = os.path.dirname(dest)
544 if not os.path.exists(dest_dir):
545 umask = os.umask(00000)
546 os.makedirs(dest_dir, 0o2775)
548 #print "Copying %s to %s..." % (src, dest)
549 if os.path.exists(dest) and os.path.isdir(dest):
550 dest += '/' + os.path.basename(src)
551 # Don't overwrite unless forced to
552 if os.path.lexists(dest):
554 raise FileExistsError
556 if not os.access(dest, os.W_OK):
557 raise CantOverwriteError
558 shutil.copy2(src, dest)
559 os.chmod(dest, perms)
561 ################################################################################
# Determine which dak config file to use: $DAK_CONFIG wins, then an optional
# per-host local config, then a per-host Config::<fqdn>::DakConfig entry,
# falling back to the compiled-in default_config.
563 def which_conf_file ():
564 if os.getenv('DAK_CONFIG'):
565 return os.getenv('DAK_CONFIG')
567 res = socket.getfqdn()
568 # In case we allow local config files per user, try if one exists
569 if Cnf.find_b("Config::" + res + "::AllowLocalConfig"):
570 homedir = os.getenv("HOME")
# NOTE(review): os.path.join discards homedir here because the second
# component is absolute — this always resolves to /etc/dak.conf, not a
# per-user path. Looks like a latent bug; confirm intent before fixing.
571 confpath = os.path.join(homedir, "/etc/dak.conf")
572 if os.path.exists(confpath):
# Merge the local config into the global Cnf in place.
573 apt_pkg.read_config_file_isc(Cnf,confpath)
575 # We are still in here, so there is no local config file or we do
576 # not allow local files. Do the normal stuff.
577 if Cnf.get("Config::" + res + "::DakConfig"):
578 return Cnf["Config::" + res + "::DakConfig"]
580 return default_config
582 ################################################################################
def TemplateSubst(subst_map, filename):
    """
    Perform a substitution of template.

    Reads the template at C{filename} and replaces every occurrence of each
    key in C{subst_map} with the stringified corresponding value.

    @type subst_map: dict
    @param subst_map: mapping of placeholder -> replacement value

    @type filename: string
    @param filename: path of the template file

    @rtype: string
    @return: the substituted template text
    """
    with open_file(filename) as templatefile:
        template = templatefile.read()
    for k, v in subst_map.iteritems():
        template = template.replace(k, str(v))
    return template
592 ################################################################################
594 def fubar(msg, exit_code=1):
595 sys.stderr.write("E: %s\n" % (msg))
599 sys.stderr.write("W: %s\n" % (msg))
601 ################################################################################
603 # Returns the user name with a laughable attempt at rfc822 conformancy
604 # (read: removing stray periods).
606 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
609 return pwd.getpwuid(os.getuid())[0]
611 ################################################################################
621 return ("%d%s" % (c, t))
623 ################################################################################
def find_next_free(dest, too_many=100):
    """
    Return C{dest} if it does not exist yet, otherwise the first free name
    of the form C{dest.N} (N counting up from 0).

    @type dest: string
    @param dest: desired path

    @type too_many: int
    @param too_many: give up after this many attempts

    @raise NoFreeFilenameError: if no free name was found within
        C{too_many} attempts.

    @rtype: string
    @return: a path that does not currently exist
    """
    extra = 0
    orig_dest = dest
    # lexists (not exists) so dangling symlinks also count as taken.
    while os.path.lexists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
        extra += 1
    if extra >= too_many:
        raise NoFreeFilenameError
    return dest
635 ################################################################################
def result_join(original, sep='\t'):
    """
    Join the elements of C{original} with C{sep}, rendering C{None}
    entries as empty strings.

    @type original: sequence
    @param original: values to join (strings or None)

    @type sep: string
    @param sep: separator

    @rtype: string
    @return: the joined string
    """
    return sep.join(["" if x is None else x for x in original])
646 ################################################################################
# Prefix every line of `str` with `prefix`; blank lines are dropped unless
# include_blank_lines is true.
648 def prefix_multi_line_string(str, prefix, include_blank_lines=0):
650 for line in str.split('\n'):
652 if line or include_blank_lines:
653 out += "%s%s\n" % (prefix, line)
654 # Strip trailing new line
659 ################################################################################
def join_with_commas_and(list):
    """Render a list as English prose: 'a, b and c' ('nothing' if empty)."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    return "%s and %s" % (", ".join(list[:-1]), list[-1])
666 ################################################################################
671 (pkg, version, constraint) = atom
673 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
676 pp_deps.append(pp_dep)
677 return " |".join(pp_deps)
679 ################################################################################
684 ################################################################################
686 def parse_args(Options):
687 """ Handle -a, -c and -s arguments; returns them as SQL constraints """
688 # XXX: This should go away and everything which calls it be converted
689 # to use SQLA properly. For now, we'll just fix it not to use
690 # the old Pg interface though
691 session = DBConn().session()
695 for suitename in split_args(Options["Suite"]):
696 suite = get_suite(suitename, session=session)
697 if not suite or suite.suite_id is None:
698 warn("suite '%s' not recognised." % (suite and suite.suite_name or suitename))
700 suite_ids_list.append(suite.suite_id)
702 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
704 fubar("No valid suite given.")
709 if Options["Component"]:
710 component_ids_list = []
711 for componentname in split_args(Options["Component"]):
712 component = get_component(componentname, session=session)
713 if component is None:
714 warn("component '%s' not recognised." % (componentname))
716 component_ids_list.append(component.component_id)
717 if component_ids_list:
718 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
720 fubar("No valid component given.")
724 # Process architecture
725 con_architectures = ""
727 if Options["Architecture"]:
729 for archname in split_args(Options["Architecture"]):
730 if archname == "source":
733 arch = get_architecture(archname, session=session)
735 warn("architecture '%s' not recognised." % (archname))
737 arch_ids_list.append(arch.arch_id)
739 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
742 fubar("No valid architecture given.")
746 return (con_suites, con_architectures, con_components, check_source)
748 ################################################################################
750 def arch_compare_sw (a, b):
752 Function for use in sorting lists of architectures.
754 Sorts normally except that 'source' dominates all others.
757 if a == "source" and b == "source":
766 ################################################################################
768 def split_args (s, dwim=True):
770 Split command line arguments which can be separated by either commas
771 or whitespace. If dwim is set, it will complain about string ending
772 in comma since this usually means someone did 'dak ls -a i386, m68k
773 foo' or something and the inevitable confusion resulting from 'm68k'
774 being treated as an argument is undesirable.
777 if s.find(",") == -1:
780 if s[-1:] == "," and dwim:
781 fubar("split_args: found trailing comma, spurious space maybe?")
784 ################################################################################
def gpg_keyring_args(keyrings=None):
    """
    Return a gpg command-line fragment selecting the given keyrings.

    @type keyrings: list of str or None
    @param keyrings: keyring paths; when empty/None, the active keyring
        paths from the database are used.

    @rtype: string
    @return: space-separated '--keyring <path>' arguments
    """
    if not keyrings:
        keyrings = get_active_keyring_paths()
    return " ".join(["--keyring %s" % x for x in keyrings])
792 ################################################################################
794 def gpg_get_key_addresses(fingerprint):
795 """retreive email addresses from gpg key uids for a given fingerprint"""
796 addresses = key_uid_email_cache.get(fingerprint)
797 if addresses != None:
801 with open(os.devnull, "wb") as devnull:
802 output = daklib.daksubprocess.check_output(
803 ["gpg", "--no-default-keyring"] + gpg_keyring_args().split() +
804 ["--with-colons", "--list-keys", fingerprint], stderr=devnull)
805 except subprocess.CalledProcessError:
808 for l in output.split('\n'):
810 if parts[0] not in ("uid", "pub"):
817 # Do not use unicode_escape, because it is locale-specific
818 uid = codecs.decode(uid, "string_escape").decode("utf-8")
819 except UnicodeDecodeError:
820 uid = uid.decode("latin1") # does not fail
821 m = re_parse_maintainer.match(uid)
825 address = address.encode("utf8") # dak still uses bytes
826 if address.endswith('@debian.org'):
827 # prefer @debian.org addresses
828 # TODO: maybe not hardcode the domain
829 addresses.insert(0, address)
831 addresses.append(address)
832 key_uid_email_cache[fingerprint] = addresses
835 ################################################################################
837 def get_logins_from_ldap(fingerprint='*'):
838 """retrieve login from LDAP linked to a given fingerprint"""
840 LDAPDn = Cnf['Import-LDAP-Fingerprints::LDAPDn']
841 LDAPServer = Cnf['Import-LDAP-Fingerprints::LDAPServer']
842 l = ldap.open(LDAPServer)
843 l.simple_bind_s('','')
844 Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
845 '(keyfingerprint=%s)' % fingerprint,
846 ['uid', 'keyfingerprint'])
849 login[elem[1]['keyFingerPrint'][0]] = elem[1]['uid'][0]
852 ################################################################################
854 def get_users_from_ldap():
855 """retrieve login and user names from LDAP"""
857 LDAPDn = Cnf['Import-LDAP-Fingerprints::LDAPDn']
858 LDAPServer = Cnf['Import-LDAP-Fingerprints::LDAPServer']
859 l = ldap.open(LDAPServer)
860 l.simple_bind_s('','')
861 Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
862 '(uid=*)', ['uid', 'cn', 'mn', 'sn'])
867 for k in ('cn', 'mn', 'sn'):
869 if elem[k][0] != '-':
870 name.append(elem[k][0])
873 users[' '.join(name)] = elem['uid'][0]
876 ################################################################################
def clean_symlink(src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.

    @type src: string
    @param src: symlink target (absolute, under root)

    @type dest: string
    @param dest: symlink location (absolute, under root)

    @type root: string
    @param root: archive root prefix to strip

    @rtype: string
    @return: fixed 'src' made relative to dest's directory
    """
    src = src.replace(root, '', 1)
    dest = dest.replace(root, '', 1)
    dest = os.path.dirname(dest)
    # One '../' per path component of dest's directory.
    new_src = '../' * len(dest.split('/'))
    return new_src + src
889 ################################################################################
891 def temp_filename(directory=None, prefix="dak", suffix="", mode=None, group=None):
893 Return a secure and unique filename by pre-creating it.
896 @param directory: If non-null it will be the directory the file is pre-created in.
899 @param prefix: The filename will be prefixed with this string
902 @param suffix: The filename will end with this string
905 @param mode: If set the file will get chmodded to those permissions
908 @param group: If set the file will get chgrped to the specified group.
911 @return: Returns a pair (fd, name)
914 (tfd, tfname) = tempfile.mkstemp(suffix, prefix, directory)
916 os.chmod(tfname, mode)
918 gid = grp.getgrnam(group).gr_gid
919 os.chown(tfname, -1, gid)
922 ################################################################################
924 def temp_dirname(parent=None, prefix="dak", suffix="", mode=None, group=None):
926 Return a secure and unique directory by pre-creating it.
929 @param parent: If non-null it will be the directory the directory is pre-created in.
932 @param prefix: The filename will be prefixed with this string
935 @param suffix: The filename will end with this string
938 @param mode: If set the file will get chmodded to those permissions
941 @param group: If set the file will get chgrped to the specified group.
944 @return: Returns a pair (fd, name)
948 tfname = tempfile.mkdtemp(suffix, prefix, parent)
950 os.chmod(tfname, mode)
952 gid = grp.getgrnam(group).gr_gid
953 os.chown(tfname, -1, gid)
956 ################################################################################
958 def is_email_alias(email):
959 """ checks if the user part of the email is listed in the alias file """
# Lazily populate the module-level alias_cache on first use.
961 if alias_cache == None:
962 aliasfn = which_alias_file()
# NOTE(review): the cache initialisation (set() creation / global decl) is
# missing from this view; confirm against upstream before editing.
# Each alias-file line is 'name: target...' — cache the name part only.
965 for l in open(aliasfn):
966 alias_cache.add(l.split(':')[0])
967 uid = email.split('@')[0]
968 return uid in alias_cache
970 ################################################################################
972 def get_changes_files(from_dir):
974 Takes a directory and lists all .changes files in it (as well as chdir'ing
975 to the directory; this is due to broken behaviour on the part of p-u/p-a
976 when you're not in the right place)
978 Returns a list of filenames
981 # Much of the rest of p-u/p-a depends on being in the right place
983 changes_files = [x for x in os.listdir(from_dir) if x.endswith('.changes')]
985 fubar("Failed to read list from directory %s (%s)" % (from_dir, e))
989 ################################################################################
991 Cnf = config.Config().Cnf
993 ################################################################################
995 def parse_wnpp_bug_file(file = "/srv/ftp-master.debian.org/scripts/masterfiles/wnpp_rm"):
997 Parses the wnpp bug list available at https://qa.debian.org/data/bts/wnpp_rm
998 Well, actually it parsed a local copy, but let's document the source
1001 returns a dict associating source package name with a list of open wnpp
1002 bugs (Yes, there might be more than one)
1008 lines = f.readlines()
1009 except IOError as e:
1010 print "Warning: Couldn't open %s; don't know about WNPP bugs, so won't close any." % file
1015 splited_line = line.split(": ", 1)
1016 if len(splited_line) > 1:
1017 wnpp[splited_line[0]] = splited_line[1].split("|")
1019 for source in wnpp.keys():
1021 for wnpp_bug in wnpp[source]:
1022 bug_no = re.search("(\d)+", wnpp_bug).group()
1028 ################################################################################
1030 def get_packages_from_ftp(root, suite, component, architecture):
1032 Returns an object containing apt_pkg-parseable data collected by
1033 aggregating Packages.gz files gathered for each architecture.
1036 @param root: path to ftp archive root directory
1039 @param suite: suite to extract files from
1041 @type component: string
1042 @param component: component to extract files from
1044 @type architecture: string
1045 @param architecture: architecture to extract files from
1048 @return: apt_pkg class containing package data
1050 filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (root, suite, component, architecture)
1051 (fd, temp_file) = temp_filename()
1052 (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_file))
1054 fubar("Gunzip invocation failed!\n%s\n" % (output), result)
1055 filename = "%s/dists/%s/%s/debian-installer/binary-%s/Packages.gz" % (root, suite, component, architecture)
1056 if os.path.exists(filename):
1057 (result, output) = commands.getstatusoutput("gunzip -c %s >> %s" % (filename, temp_file))
1059 fubar("Gunzip invocation failed!\n%s\n" % (output), result)
1060 packages = open_file(temp_file)
1061 Packages = apt_pkg.TagFile(packages)
1062 os.unlink(temp_file)
1065 ################################################################################
def deb_extract_control(fh):
    """extract DEBIAN/control from a binary package"""
    deb = apt_inst.DebFile(fh)
    return deb.control.extractdata("control")
1071 ################################################################################
def mail_addresses_for_upload(maintainer, changed_by, fingerprint):
    """mail addresses to contact for an upload

    @type maintainer: str
    @param maintainer: Maintainer field of the .changes file

    @type changed_by: str
    @param changed_by: Changed-By field of the .changes file

    @type fingerprint: str
    @param fingerprint: fingerprint of the key used to sign the upload

    @rtype: list of str
    @return: list of RFC 2047-encoded mail addresses to contact regarding
             this upload
    """
    recipients = [maintainer]
    if changed_by != maintainer:
        recipients.append(changed_by)

    # If neither the maintainer's nor the uploader's address belongs to the
    # signing key, also contact the key's first address.
    key_addresses = gpg_get_key_addresses(fingerprint)
    if key_addresses:
        changed_by_known = fix_maintainer(changed_by)[3] in key_addresses
        maintainer_known = fix_maintainer(maintainer)[3] in key_addresses
        if not (changed_by_known or maintainer_known):
            recipients.append(key_addresses[0])

    return [fix_maintainer(r)[1] for r in recipients]
1100 ################################################################################
1102 def call_editor(text="", suffix=".txt"):
1103 """run editor and return the result as a string
1106 @param text: initial text
1109 @param suffix: extension for temporary file
1112 @return: string with the edited text
1114 editor = os.environ.get('VISUAL', os.environ.get('EDITOR', 'vi'))
1115 tmp = tempfile.NamedTemporaryFile(suffix=suffix, delete=False)
1119 daklib.daksubprocess.check_call([editor, tmp.name])
1120 return open(tmp.name, 'r').read()
1124 ################################################################################
1126 def check_reverse_depends(removals, suite, arches=None, session=None, cruft=False, quiet=False):
1127 dbsuite = get_suite(suite, session)
1128 overridesuite = dbsuite
1129 if dbsuite.overridesuite is not None:
1130 overridesuite = get_suite(dbsuite.overridesuite, session)
1133 all_broken = defaultdict(lambda: defaultdict(set))
1135 all_arches = set(arches)
1137 all_arches = set(x.arch_string for x in get_suite_architectures(suite))
1138 all_arches -= set(["source", "all"])
1139 metakey_d = get_or_set_metadatakey("Depends", session)
1140 metakey_p = get_or_set_metadatakey("Provides", session)
1142 'suite_id': dbsuite.suite_id,
1143 'metakey_d_id': metakey_d.key_id,
1144 'metakey_p_id': metakey_p.key_id,
1146 for architecture in all_arches | set(['all']):
1149 virtual_packages = {}
1150 params['arch_id'] = get_architecture(architecture, session).arch_id
1153 SELECT b.package, s.source, c.name as component,
1154 (SELECT bmd.value FROM binaries_metadata bmd WHERE bmd.bin_id = b.id AND bmd.key_id = :metakey_d_id) AS depends,
1155 (SELECT bmp.value FROM binaries_metadata bmp WHERE bmp.bin_id = b.id AND bmp.key_id = :metakey_p_id) AS provides
1157 JOIN bin_associations ba ON b.id = ba.bin AND ba.suite = :suite_id
1158 JOIN source s ON b.source = s.id
1159 JOIN files_archive_map af ON b.file = af.file_id
1160 JOIN component c ON af.component_id = c.id
1161 WHERE b.architecture = :arch_id'''
1162 query = session.query('package', 'source', 'component', 'depends', 'provides'). \
1163 from_statement(statement).params(params)
1164 for package, source, component, depends, provides in query:
1165 sources[package] = source
1166 p2c[package] = component
1167 if depends is not None:
1168 deps[package] = depends
1169 # Maintain a counter for each virtual package. If a
1170 # Provides: exists, set the counter to 0 and count all
1171 # provides by a package not in the list for removal.
1172 # If the counter stays 0 at the end, we know that only
1173 # the to-be-removed packages provided this virtual
1175 if provides is not None:
1176 for virtual_pkg in provides.split(","):
1177 virtual_pkg = virtual_pkg.strip()
1178 if virtual_pkg == package: continue
1179 if not virtual_packages.has_key(virtual_pkg):
1180 virtual_packages[virtual_pkg] = 0
1181 if package not in removals:
1182 virtual_packages[virtual_pkg] += 1
1184 # If a virtual package is only provided by the to-be-removed
1185 # packages, treat the virtual package as to-be-removed too.
1186 for virtual_pkg in virtual_packages:
1187 if virtual_packages[virtual_pkg] == 0:
1188 removals.append(virtual_pkg)
1190 # Check binary dependencies (Depends)
1191 for package in deps:
1192 if package in removals: continue
1194 parsed_dep = apt_pkg.parse_depends(deps[package])
1195 except ValueError as e:
1196 print "Error for package %s: %s" % (package, e)
1198 for dep in parsed_dep:
1199 # Check for partial breakage. If a package has a ORed
1200 # dependency, there is only a dependency problem if all
1201 # packages in the ORed depends will be removed.
1203 for dep_package, _, _ in dep:
1204 if dep_package in removals:
1206 if unsat == len(dep):
1207 component = p2c[package]
1208 source = sources[package]
1209 if component != "main":
1210 source = "%s/%s" % (source, component)
1211 all_broken[source][package].add(architecture)
1214 if all_broken and not quiet:
1216 print " - broken Depends:"
1218 print "# Broken Depends:"
1219 for source, bindict in sorted(all_broken.items()):
1221 for binary, arches in sorted(bindict.items()):
1222 if arches == all_arches or 'all' in arches:
1223 lines.append(binary)
1225 lines.append('%s [%s]' % (binary, ' '.join(sorted(arches))))
1227 print ' %s: %s' % (source, lines[0])
1229 print '%s: %s' % (source, lines[0])
1230 for line in lines[1:]:
1232 print ' ' + ' ' * (len(source) + 2) + line
1234 print ' ' * (len(source) + 2) + line
1238 # Check source dependencies (Build-Depends and Build-Depends-Indep)
1239 all_broken = defaultdict(set)
1240 metakey_bd = get_or_set_metadatakey("Build-Depends", session)
1241 metakey_bdi = get_or_set_metadatakey("Build-Depends-Indep", session)
1243 'suite_id': dbsuite.suite_id,
1244 'metakey_ids': (metakey_bd.key_id, metakey_bdi.key_id),
1247 SELECT s.source, string_agg(sm.value, ', ') as build_dep
1249 JOIN source_metadata sm ON s.id = sm.src_id
1251 (SELECT source FROM src_associations
1252 WHERE suite = :suite_id)
1253 AND sm.key_id in :metakey_ids
1254 GROUP BY s.id, s.source'''
1255 query = session.query('source', 'build_dep').from_statement(statement). \
1257 for source, build_dep in query:
1258 if source in removals: continue
1260 if build_dep is not None:
1261 # Remove [arch] information since we want to see breakage on all arches
1262 build_dep = re_build_dep_arch.sub("", build_dep)
1264 parsed_dep = apt_pkg.parse_src_depends(build_dep)
1265 except ValueError as e:
1266 print "Error for source %s: %s" % (source, e)
1267 for dep in parsed_dep:
1269 for dep_package, _, _ in dep:
1270 if dep_package in removals:
1272 if unsat == len(dep):
1273 component, = session.query(Component.component_name) \
1274 .join(Component.overrides) \
1275 .filter(Override.suite == overridesuite) \
1276 .filter(Override.package == re.sub('/(contrib|non-free)$', '', source)) \
1277 .join(Override.overridetype).filter(OverrideType.overridetype == 'dsc') \
1280 if component != "main":
1281 key = "%s/%s" % (source, component)
1282 all_broken[key].add(pp_deps(dep))
1285 if all_broken and not quiet:
1287 print " - broken Build-Depends:"
1289 print "# Broken Build-Depends:"
1290 for source, bdeps in sorted(all_broken.items()):
1291 bdeps = sorted(bdeps)
1293 print ' %s: %s' % (source, bdeps[0])
1295 print '%s: %s' % (source, bdeps[0])
1296 for bdep in bdeps[1:]:
1298 print ' ' + ' ' * (len(source) + 2) + bdep
1300 print ' ' * (len(source) + 2) + bdep