2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
42 import email as modemail
43 from dak_exceptions import *
44 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
45 re_multi_line_field, re_srchasver, re_verwithext, \
46 re_parse_maintainer, re_taint_free, re_gpg_uid, re_re_mark, \
49 ################################################################################
51 default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
52 default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used
54 alias_cache = None #: Cache for email alias checks
55 key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
57 # (hashname, function, earliest_changes_version)
58 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
59 ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
61 ################################################################################
64 """ Escape html chars """
65 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
67 ################################################################################
69 def open_file(filename, mode='r'):
71 Open C{file}, return fileobject.
73 @type filename: string
74 @param filename: path/filename to open
77 @param mode: open mode
80 @return: open fileobject
82 @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
86 f = open(filename, mode)
88 raise CantOpenError, filename
91 ################################################################################
93 def our_raw_input(prompt=""):
95 sys.stdout.write(prompt)
101 sys.stderr.write("\nUser interrupt (^D).\n")
104 ################################################################################
106 def extract_component_from_section(section):
109 if section.find('/') != -1:
110 component = section.split('/')[0]
112 # Expand default component
114 if Cnf.has_key("Component::%s" % section):
119 return (section, component)
121 ################################################################################
123 def parse_deb822(contents, signing_rules=0):
127 # Split the lines in the input, keeping the linebreaks.
128 lines = contents.splitlines(True)
131 raise ParseChangesError, "[Empty changes file]"
133 # Reindex by line number so we can easily verify the format of
139 indexed_lines[index] = line[:-1]
143 num_of_lines = len(indexed_lines.keys())
146 while index < num_of_lines:
148 line = indexed_lines[index]
150 if signing_rules == 1:
152 if index > num_of_lines:
153 raise InvalidDscError, index
154 line = indexed_lines[index]
155 if not line.startswith("-----BEGIN PGP SIGNATURE"):
156 raise InvalidDscError, index
161 if line.startswith("-----BEGIN PGP SIGNATURE"):
163 if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
165 if signing_rules == 1:
166 while index < num_of_lines and line != "":
168 line = indexed_lines[index]
170 # If we're not inside the signed data, don't process anything
171 if signing_rules >= 0 and not inside_signature:
173 slf = re_single_line_field.match(line)
175 field = slf.groups()[0].lower()
176 changes[field] = slf.groups()[1]
180 changes[field] += '\n'
182 mlf = re_multi_line_field.match(line)
185 raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
186 if first == 1 and changes[field] != "":
187 changes[field] += '\n'
189 changes[field] += mlf.groups()[0] + '\n'
193 if signing_rules == 1 and inside_signature:
194 raise InvalidDscError, index
196 changes["filecontents"] = "".join(lines)
198 if changes.has_key("source"):
199 # Strip the source version in brackets from the source field,
200 # put it in the "source-version" field instead.
201 srcver = re_srchasver.search(changes["source"])
203 changes["source"] = srcver.group(1)
204 changes["source-version"] = srcver.group(2)
207 raise ParseChangesError, error
211 ################################################################################
213 def parse_changes(filename, signing_rules=0):
215 Parses a changes file and returns a dictionary where each field is a
216 key. The mandatory first argument is the filename of the .changes
219 signing_rules is an optional argument:
221 - If signing_rules == -1, no signature is required.
222 - If signing_rules == 0 (the default), a signature is required.
223 - If signing_rules == 1, it turns on the same strict format checking
226 The rules for (signing_rules == 1)-mode are:
228 - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
229 followed by any PGP header data and must end with a blank line.
231 - The data section must end with a blank line and must be followed by
232 "-----BEGIN PGP SIGNATURE-----".
235 changes_in = open_file(filename)
236 content = changes_in.read()
239 unicode(content, 'utf-8')
241 raise ChangesUnicodeError, "Changes file not proper utf-8"
242 return parse_deb822(content, signing_rules)
244 ################################################################################
def hash_key(hashname):
    """Return the files-dict key under which the *hashname* digest is stored."""
    return hashname + 'sum'
249 ################################################################################
251 def create_hash(where, files, hashname, hashfunc):
253 create_hash extends the passed files dict with the given hash by
254 iterating over all files on disk and passing them to the hashing
259 for f in files.keys():
261 file_handle = open_file(f)
262 except CantOpenError:
263 rejmsg.append("Could not open file %s for checksumming" % (f))
266 files[f][hash_key(hashname)] = hashfunc(file_handle)
271 ################################################################################
273 def check_hash(where, files, hashname, hashfunc):
275 check_hash checks the given hash in the files dict against the actual
276 files on disk. The hash values need to be present consistently in
277 all file entries. It does not modify its input in any way.
281 for f in files.keys():
285 file_handle = open_file(f)
287 # Check for the hash entry, to not trigger a KeyError.
288 if not files[f].has_key(hash_key(hashname)):
289 rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
293 # Actually check the hash for correctness.
294 if hashfunc(file_handle) != files[f][hash_key(hashname)]:
295 rejmsg.append("%s: %s check failed in %s" % (f, hashname,
297 except CantOpenError:
298 # TODO: This happens when the file is in the pool.
299 # warn("Cannot open file %s" % f)
306 ################################################################################
308 def check_size(where, files):
310 check_size checks the file sizes in the passed files dict against the
315 for f in files.keys():
320 # TODO: This happens when the file is in the pool.
324 actual_size = entry[stat.ST_SIZE]
325 size = int(files[f]["size"])
326 if size != actual_size:
327 rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
328 % (f, actual_size, size, where))
331 ################################################################################
def check_hash_fields(what, manifest):
    """
    Ensure there are no checksums-* fields in *manifest* for hashes we do
    not know about.  Returns a list of reject messages (empty when clean).
    """
    rejmsg = []
    known = [x[0] for x in known_hashes]
    for field in manifest:
        if not field.startswith("checksums-"):
            continue
        hashname = field.split("-", 1)[1]
        if hashname not in known:
            rejmsg.append("Unsupported checksum field for %s in %s" % (hashname, what))
    return rejmsg
349 ################################################################################
351 def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
352 if format >= version:
353 # The version should contain the specified hash.
356 # Import hashes from the changes
357 rejmsg = parse_checksums(".changes", files, changes, hashname)
361 # We need to calculate the hash because it can't possibly
364 return func(".changes", files, hashname, hashfunc)
366 # We could add the orig which might be in the pool to the files dict to
367 # access the checksums easily.
369 def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
371 ensure_dsc_hashes' task is to ensure that each and every *present* hash
372 in the dsc is correct, i.e. identical to the changes file and if necessary
373 the pool. The latter task is delegated to check_hash.
377 if not dsc.has_key('Checksums-%s' % (hashname,)):
379 # Import hashes from the dsc
380 parse_checksums(".dsc", dsc_files, dsc, hashname)
382 rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
385 ################################################################################
387 def ensure_hashes(changes, dsc, files, dsc_files):
390 # Make sure we recognise the format of the Files: field in the .changes
391 format = changes.get("format", "0.0").split(".", 1)
393 format = int(format[0]), int(format[1])
395 format = int(float(format[0])), 0
397 # We need to deal with the original changes blob, as the fields we need
398 # might not be in the changes dict serialised into the .dak anymore.
399 orig_changes = parse_deb822(changes['filecontents'])
401 # Copy the checksums over to the current changes dict. This will keep
402 # the existing modifications to it intact.
403 for field in orig_changes:
404 if field.startswith('checksums-'):
405 changes[field] = orig_changes[field]
407 # Check for unsupported hashes
408 rejmsg.extend(check_hash_fields(".changes", changes))
409 rejmsg.extend(check_hash_fields(".dsc", dsc))
411 # We have to calculate the hash if we have an earlier changes version than
412 # the hash appears in rather than require it exist in the changes file
413 for hashname, hashfunc, version in known_hashes:
414 rejmsg.extend(_ensure_changes_hash(changes, format, version, files,
416 if "source" in changes["architecture"]:
417 rejmsg.extend(_ensure_dsc_hash(dsc, dsc_files, hashname,
422 def parse_checksums(where, files, manifest, hashname):
424 field = 'checksums-%s' % hashname
425 if not field in manifest:
427 for line in manifest[field].split('\n'):
430 checksum, size, checkfile = line.strip().split(' ')
431 if not files.has_key(checkfile):
432 # TODO: check for the file's entry in the original files dict, not
433 # the one modified by (auto)byhand and other weird stuff
434 # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
435 # (file, hashname, where))
437 if not files[checkfile]["size"] == size:
438 rejmsg.append("%s: size differs for files and checksums-%s entry "\
439 "in %s" % (checkfile, hashname, where))
441 files[checkfile][hash_key(hashname)] = checksum
442 for f in files.keys():
443 if not files[f].has_key(hash_key(hashname)):
444 rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
448 ################################################################################
450 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
452 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
455 # Make sure we have a Files: field to parse...
456 if not changes.has_key(field):
457 raise NoFilesFieldError
459 # Make sure we recognise the format of the Files: field
460 format = re_verwithext.search(changes.get("format", "0.0"))
462 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
464 format = format.groups()
465 if format[1] == None:
466 format = int(float(format[0])), 0, format[2]
468 format = int(format[0]), int(format[1]), format[2]
469 if format[2] == None:
473 # format = (1,0) are the only formats we currently accept,
474 # format = (0,0) are missing format headers of which we still
475 # have some in the archive.
476 if format != (1,0) and format != (0,0):
477 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
479 if (format < (1,5) or format > (1,8)):
480 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
481 if field != "files" and format < (1,8):
482 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
484 includes_section = (not is_a_dsc) and field == "files"
486 # Parse each entry/line:
487 for i in changes[field].split('\n'):
491 section = priority = ""
494 (md5, size, section, priority, name) = s
496 (md5, size, name) = s
498 raise ParseChangesError, i
505 (section, component) = extract_component_from_section(section)
507 files[name] = Dict(size=size, section=section,
508 priority=priority, component=component)
509 files[name][hashname] = md5
513 ################################################################################
515 def force_to_utf8(s):
517 Forces a string to UTF-8. If the string isn't already UTF-8,
518 it's assumed to be ISO-8859-1.
524 latin1_s = unicode(s,'iso8859-1')
525 return latin1_s.encode('utf-8')
527 def rfc2047_encode(s):
529 Encodes a (header) string per RFC2047 if necessary. If the
530 string is neither ASCII nor UTF-8, it's assumed to be ISO-8859-1.
533 codecs.lookup('ascii')[1](s)
538 codecs.lookup('utf-8')[1](s)
539 h = email.Header.Header(s, 'utf-8', 998)
542 h = email.Header.Header(s, 'iso-8859-1', 998)
545 ################################################################################
547 # <Culus> 'The standard sucks, but my tool is supposed to interoperate
548 # with it. I know - I'll fix the suckage and make things
551 def fix_maintainer (maintainer):
553 Parses a Maintainer or Changed-By field and returns:
554 1. an RFC822 compatible version,
555 2. an RFC2047 compatible version,
559 The name is forced to UTF-8 for both 1. and 3.. If the name field
560 contains '.' or ',' (as allowed by Debian policy), 1. and 2. are
561 switched to 'email (name)' format.
564 maintainer = maintainer.strip()
566 return ('', '', '', '')
568 if maintainer.find("<") == -1:
571 elif (maintainer[0] == "<" and maintainer[-1:] == ">"):
572 email = maintainer[1:-1]
575 m = re_parse_maintainer.match(maintainer)
577 raise ParseMaintError, "Doesn't parse as a valid Maintainer field."
581 # Get an RFC2047 compliant version of the name
582 rfc2047_name = rfc2047_encode(name)
584 # Force the name to be UTF-8
585 name = force_to_utf8(name)
587 if name.find(',') != -1 or name.find('.') != -1:
588 rfc822_maint = "%s (%s)" % (email, name)
589 rfc2047_maint = "%s (%s)" % (email, rfc2047_name)
591 rfc822_maint = "%s <%s>" % (name, email)
592 rfc2047_maint = "%s <%s>" % (rfc2047_name, email)
594 if email.find("@") == -1 and email.find("buildd_") != 0:
595 raise ParseMaintError, "No @ found in email address part."
597 return (rfc822_maint, rfc2047_maint, name, email)
599 ################################################################################
601 def send_mail (message, filename=""):
602 """sendmail wrapper, takes _either_ a message string or a file as arguments"""
604 # If we've been passed a string dump it into a temporary file
606 (fd, filename) = tempfile.mkstemp()
607 os.write (fd, message)
610 if Cnf.has_key("Dinstall::MailWhiteList") and \
611 Cnf["Dinstall::MailWhiteList"] != "":
612 message_in = open_file(filename)
613 message_raw = modemail.message_from_file(message_in)
617 whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
619 for line in whitelist_in:
620 if not re_whitespace_comment.match(line):
621 if re_re_mark.match(line):
622 whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
624 whitelist.append(re.compile(re.escape(line.strip())))
629 fields = ["To", "Bcc", "Cc"]
632 value = message_raw.get(field, None)
635 for item in value.split(","):
636 (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
642 if not mail_whitelisted:
643 print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])
647 # Doesn't have any mail in whitelist so remove the header
649 del message_raw[field]
651 message_raw.replace_header(field, string.join(match, ", "))
653 # Change message fields in order if we don't have a To header
654 if not message_raw.has_key("To"):
657 if message_raw.has_key(field):
658 message_raw[fields[-1]] = message_raw[field]
659 del message_raw[field]
662 # Clean up any temporary files
663 # and return, as we removed all recipients.
665 os.unlink (filename);
668 fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
669 os.write (fd, message_raw.as_string(True));
673 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
675 raise SendmailFailedError, output
677 # Clean up any temporary files
681 ################################################################################
def poolify (source, component):
    """
    Return the pool directory fragment for *source* inside *component*,
    e.g. ('libfoo', 'main') -> 'main/libf/libfoo/'.
    """
    if component:
        component += '/'
    # Sources named lib* are pooled under their first four characters
    # (the 'libX' convention), everything else under the first character.
    if source.startswith("lib"):
        subdir = source[:4]
    else:
        subdir = source[:1]
    return "%s%s/%s/" % (component, subdir, source)
691 ################################################################################
693 def move (src, dest, overwrite = 0, perms = 0664):
694 if os.path.exists(dest) and os.path.isdir(dest):
697 dest_dir = os.path.dirname(dest)
698 if not os.path.exists(dest_dir):
699 umask = os.umask(00000)
700 os.makedirs(dest_dir, 02775)
702 #print "Moving %s to %s..." % (src, dest)
703 if os.path.exists(dest) and os.path.isdir(dest):
704 dest += '/' + os.path.basename(src)
705 # Don't overwrite unless forced to
706 if os.path.exists(dest):
708 fubar("Can't move %s to %s - file already exists." % (src, dest))
710 if not os.access(dest, os.W_OK):
711 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
712 shutil.copy2(src, dest)
713 os.chmod(dest, perms)
716 def copy (src, dest, overwrite = 0, perms = 0664):
717 if os.path.exists(dest) and os.path.isdir(dest):
720 dest_dir = os.path.dirname(dest)
721 if not os.path.exists(dest_dir):
722 umask = os.umask(00000)
723 os.makedirs(dest_dir, 02775)
725 #print "Copying %s to %s..." % (src, dest)
726 if os.path.exists(dest) and os.path.isdir(dest):
727 dest += '/' + os.path.basename(src)
728 # Don't overwrite unless forced to
729 if os.path.exists(dest):
731 raise FileExistsError
733 if not os.access(dest, os.W_OK):
734 raise CantOverwriteError
735 shutil.copy2(src, dest)
736 os.chmod(dest, perms)
738 ################################################################################
741 res = socket.gethostbyaddr(socket.gethostname())
742 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
743 if database_hostname:
744 return database_hostname
def which_conf_file ():
    """Return the dak config file path appropriate for this host."""
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    # Per-user local config, when the host's policy allows it.
    if Cnf.FindB("Config::" + hostname + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        confpath = os.path.join(homedir, "/etc/dak.conf")
        # NOTE(review): joining with an absolute second component discards
        # homedir (confpath is always "/etc/dak.conf"), and the read uses
        # default_config rather than confpath -- both look unintended;
        # confirm before relying on this branch.
        if os.path.exists(confpath):
            apt_pkg.ReadConfigFileISC(Cnf,default_config)

    # No usable local config file: fall back to the per-host or global one.
    if Cnf.get("Config::" + hostname + "::DakConfig"):
        return Cnf["Config::" + hostname + "::DakConfig"]
    return default_config
def which_apt_conf_file ():
    """Return the apt config file path appropriate for this host."""
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    # Per-user local config, when the host's policy allows it.
    if Cnf.FindB("Config::" + hostname + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        confpath = os.path.join(homedir, "/etc/dak.conf")
        # NOTE(review): os.path.join with an absolute second component
        # discards homedir, and default_config (not confpath) is read --
        # looks unintended; confirm before relying on this branch.
        if os.path.exists(confpath):
            apt_pkg.ReadConfigFileISC(Cnf,default_config)

    if Cnf.get("Config::" + hostname + "::AptConfig"):
        return Cnf["Config::" + hostname + "::AptConfig"]
    return default_apt_config
778 def which_alias_file():
779 hostname = socket.gethostbyaddr(socket.gethostname())[0]
780 aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
781 if os.path.exists(aliasfn):
786 ################################################################################
788 # Escape characters which have meaning to SQL's regex comparison operator ('~')
789 # (woefully incomplete)
792 s = s.replace('+', '\\\\+')
793 s = s.replace('.', '\\\\.')
796 ################################################################################
def TemplateSubst(map, filename):
    """Read the template *filename* and replace every key of *map* with its value."""
    fh = open_file(filename)
    text = fh.read()
    for needle, replacement in map.items():
        text = text.replace(needle, replacement)
    fh.close()
    return text
807 ################################################################################
def fubar(msg, exit_code=1):
    """Write an error message to stderr and terminate with *exit_code*."""
    sys.stderr.write("E: %s\n" % (msg))
    sys.exit(exit_code)

def warn(msg):
    """Write a warning message to stderr."""
    sys.stderr.write("W: %s\n" % (msg))
816 ################################################################################
818 # Returns the user name with a laughable attempt at rfc822 conformancy
819 # (read: removing stray periods).
821 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
824 return pwd.getpwuid(os.getuid())[0]
826 ################################################################################
836 return ("%d%s" % (c, t))
838 ################################################################################
840 def cc_fix_changes (changes):
841 o = changes.get("architecture", "")
843 del changes["architecture"]
844 changes["architecture"] = {}
846 changes["architecture"][j] = 1
848 def changes_compare (a, b):
849 """ Sort by source name, source version, 'have source', and then by filename """
851 a_changes = parse_changes(a)
856 b_changes = parse_changes(b)
860 cc_fix_changes (a_changes)
861 cc_fix_changes (b_changes)
863 # Sort by source name
864 a_source = a_changes.get("source")
865 b_source = b_changes.get("source")
866 q = cmp (a_source, b_source)
870 # Sort by source version
871 a_version = a_changes.get("version", "0")
872 b_version = b_changes.get("version", "0")
873 q = apt_pkg.VersionCompare(a_version, b_version)
877 # Sort by 'have source'
878 a_has_source = a_changes["architecture"].get("source")
879 b_has_source = b_changes["architecture"].get("source")
880 if a_has_source and not b_has_source:
882 elif b_has_source and not a_has_source:
885 # Fall back to sort by filename
888 ################################################################################
def find_next_free (dest, too_many=100):
    """
    Return *dest* if it does not exist yet, otherwise the first free name
    of the form dest.N (N = 0, 1, ...).  Raises NoFreeFilenameError once
    *too_many* candidates have been tried.
    """
    candidate = dest
    attempt = 0
    while os.path.exists(candidate) and attempt < too_many:
        candidate = dest + '.' + repr(attempt)
        attempt += 1
    if attempt >= too_many:
        raise NoFreeFilenameError
    return candidate
900 ################################################################################
def result_join (original, sep = '\t'):
    """
    Join the entries of *original* with *sep*, rendering None entries as
    the empty string.
    """
    rendered = []
    for item in original:
        if item == None:
            rendered.append("")
        else:
            rendered.append(item)
    return sep.join(rendered)
911 ################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    """
    Prefix every line of *str* with *prefix*.  Each line is stripped of
    surrounding whitespace; blank lines are dropped unless
    include_blank_lines is set.
    """
    prefixed = []
    for raw in str.split('\n'):
        stripped = raw.strip()
        if stripped or include_blank_lines:
            prefixed.append("%s%s" % (prefix, stripped))
    # Joining (instead of appending '\n' per line) leaves no trailing newline.
    return "\n".join(prefixed)
926 def validate_changes_file_arg(filename, require_changes=1):
928 'filename' is either a .changes or .dak file. If 'filename' is a
929 .dak file, it's changed to be the corresponding .changes file. The
930 function then checks if the .changes file a) exists and b) is
931 readable and returns the .changes filename if so. If there's a
932 problem, the next action depends on the option 'require_changes'
935 - If 'require_changes' == -1, errors are ignored and the .changes
936 filename is returned.
937 - If 'require_changes' == 0, a warning is given and 'None' is returned.
938 - If 'require_changes' == 1, a fatal error is raised.
943 orig_filename = filename
944 if filename.endswith(".dak"):
945 filename = filename[:-4]+".changes"
947 if not filename.endswith(".changes"):
948 error = "invalid file type; not a changes file"
950 if not os.access(filename,os.R_OK):
951 if os.path.exists(filename):
952 error = "permission denied"
954 error = "file not found"
957 if require_changes == 1:
958 fubar("%s: %s." % (orig_filename, error))
959 elif require_changes == 0:
960 warn("Skipping %s - %s" % (orig_filename, error))
962 else: # We only care about the .dak file
967 ################################################################################
970 return (arch != "source" and arch != "all")
972 ################################################################################
def join_with_commas_and(list):
    """Render a list as English prose: 'a, b and c' ('nothing' when empty)."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    return "%s and %s" % (", ".join(list[:-1]), list[-1])
979 ################################################################################
984 (pkg, version, constraint) = atom
986 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
989 pp_deps.append(pp_dep)
990 return " |".join(pp_deps)
992 ################################################################################
997 ################################################################################
999 def parse_args(Options):
1000 """ Handle -a, -c and -s arguments; returns them as SQL constraints """
1002 if Options["Suite"]:
1004 for suite in split_args(Options["Suite"]):
1005 suite_id = database.get_suite_id(suite)
1007 warn("suite '%s' not recognised." % (suite))
1009 suite_ids_list.append(suite_id)
1011 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
1013 fubar("No valid suite given.")
1018 if Options["Component"]:
1019 component_ids_list = []
1020 for component in split_args(Options["Component"]):
1021 component_id = database.get_component_id(component)
1022 if component_id == -1:
1023 warn("component '%s' not recognised." % (component))
1025 component_ids_list.append(component_id)
1026 if component_ids_list:
1027 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
1029 fubar("No valid component given.")
1033 # Process architecture
1034 con_architectures = ""
1035 if Options["Architecture"]:
1038 for architecture in split_args(Options["Architecture"]):
1039 if architecture == "source":
1042 architecture_id = database.get_architecture_id(architecture)
1043 if architecture_id == -1:
1044 warn("architecture '%s' not recognised." % (architecture))
1046 arch_ids_list.append(architecture_id)
1048 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
1050 if not check_source:
1051 fubar("No valid architecture given.")
1055 return (con_suites, con_architectures, con_components, check_source)
1057 ################################################################################
1059 # Inspired(tm) by Bryn Keller's print_exc_plus (See
1060 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
1063 tb = sys.exc_info()[2]
1070 frame = frame.f_back
1072 traceback.print_exc()
1074 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
1075 frame.f_code.co_filename,
1077 for key, value in frame.f_locals.items():
1078 print "\t%20s = " % key,
1082 print "<unable to print>"
1084 ################################################################################
1086 def try_with_debug(function):
1094 ################################################################################
1096 def arch_compare_sw (a, b):
1098 Function for use in sorting lists of architectures.
1100 Sorts normally except that 'source' dominates all others.
1103 if a == "source" and b == "source":
1112 ################################################################################
def split_args (s, dwim=1):
    """
    Split command line arguments which can be separated by either commas
    or whitespace.  If dwim is set, it will complain about a string ending
    in comma since this usually means someone did 'dak ls -a i386, m68k
    foo' or something and the inevitable confusion resulting from 'm68k'
    being treated as an argument is undesirable.
    """
    # No comma at all: plain whitespace split.
    if s.find(",") == -1:
        return s.split()
    if s[-1:] == "," and dwim:
        fubar("split_args: found trailing comma, spurious space maybe?")
    return s.split(",")
1130 ################################################################################
def Dict(**kwargs):
    """Tiny helper: build a dictionary from keyword arguments."""
    return kwargs
1134 ########################################
1136 def gpgv_get_status_output(cmd, status_read, status_write):
1138 Our very own version of commands.getouputstatus(), hacked to support
1142 cmd = ['/bin/sh', '-c', cmd]
1143 p2cread, p2cwrite = os.pipe()
1144 c2pread, c2pwrite = os.pipe()
1145 errout, errin = os.pipe()
1155 for i in range(3, 256):
1156 if i != status_write:
1162 os.execvp(cmd[0], cmd)
1168 os.dup2(c2pread, c2pwrite)
1169 os.dup2(errout, errin)
1171 output = status = ""
1173 i, o, e = select.select([c2pwrite, errin, status_read], [], [])
1176 r = os.read(fd, 8196)
1178 more_data.append(fd)
1179 if fd == c2pwrite or fd == errin:
1181 elif fd == status_read:
1184 fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
1186 pid, exit_status = os.waitpid(pid, 0)
1188 os.close(status_write)
1189 os.close(status_read)
1199 return output, status, exit_status
1201 ################################################################################
def process_gpgv_output(status):
    """
    Parse gpgv --status-fd output into a {keyword: args-list} dict plus an
    internal-error string (empty when the output was well-formed).
    """
    keywords = {}
    internal_error = ""
    for line in status.split('\n'):
        line = line.strip()
        if line == "":
            continue
        split = line.split()
        if len(split) < 2:
            internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
            continue
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
            continue
        args = split[2:]
        # Some status tokens legitimately repeat; any other duplicate is an error.
        if keyword in keywords and keyword not in ("NODATA", "SIGEXPIRED", "KEYEXPIRED"):
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
        else:
            keywords[keyword] = args
    return (keywords, internal_error)
1228 ################################################################################
1230 def retrieve_key (filename, keyserver=None, keyring=None):
1232 Retrieve the key that signed 'filename' from 'keyserver' and
1233 add it to 'keyring'. Returns nothing on success, or an error message
1237 # Defaults for keyserver and keyring
1239 keyserver = Cnf["Dinstall::KeyServer"]
1241 keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
1243 # Ensure the filename contains no shell meta-characters or other badness
1244 if not re_taint_free.match(filename):
1245 return "%s: tainted filename" % (filename)
1247 # Invoke gpgv on the file
1248 status_read, status_write = os.pipe()
1249 cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1250 (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1252 # Process the status-fd output
1253 (keywords, internal_error) = process_gpgv_output(status)
1255 return internal_error
1257 if not keywords.has_key("NO_PUBKEY"):
1258 return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1260 fingerprint = keywords["NO_PUBKEY"][0]
1261 # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
1262 # it'll try to create a lockfile in /dev. A better solution might
1263 # be a tempfile or something.
1264 cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1265 % (Cnf["Dinstall::SigningKeyring"])
1266 cmd += " --keyring %s --keyserver %s --recv-key %s" \
1267 % (keyring, keyserver, fingerprint)
1268 (result, output) = commands.getstatusoutput(cmd)
1270 return "'%s' failed with exit code %s" % (cmd, result)
1274 ################################################################################
1276 def gpg_keyring_args(keyrings=None):
# Build the "--keyring A --keyring B ..." argument string for gpg/gpgv
# invocations. NOTE(review): an elided guard (likely "if not keyrings:")
# precedes line 1278, so the configured default keyrings are presumably only
# used when the caller passed none -- confirm against the full source.
1278 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1280 return " ".join(["--keyring %s" % x for x in keyrings])
1282 ################################################################################
1284 def check_signature (sig_filename, reject, data_filename="", keyrings=None, autofetch=None):
# Verify the GPG signature on 'sig_filename' by running gpgv and parsing its
# status-fd output; errors are reported through the caller-supplied reject()
# callback. NOTE(review): this listing is elided -- several guards and
# "return None" / "return fingerprint" lines are missing between the visible
# lines; hedged comments below mark where they presumably sit.
1286 Check the signature of a file and return the fingerprint if the
1287 signature is valid or 'None' if it's not. The first argument is the
1288 filename whose signature should be checked. The second argument is a
1289 reject function and is called when an error is found. The reject()
1290 function must allow for two arguments: the first is the error message,
1291 the second is an optional prefix string. It's possible for reject()
1292 to be called more than once during an invocation of check_signature().
1293 The third argument is optional and is the name of the files the
1294 detached signature applies to. The fourth argument is optional and is
1295 a *list* of keyrings to use. 'autofetch' can either be None, True or
1296 False. If None, the default behaviour specified in the config will be
1300 # Ensure the filename contains no shell meta-characters or other badness
# Both filenames end up inside a shell command string, hence the taint checks.
1301 if not re_taint_free.match(sig_filename):
1302 reject("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
1305 if data_filename and not re_taint_free.match(data_filename):
1306 reject("!!WARNING!! tainted data filename: '%s'." % (data_filename))
# (elided guard) presumably "if not keyrings:" precedes this default
1310 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1312 # Autofetch the signing key if that's enabled
1313 if autofetch == None:
1314 autofetch = Cnf.get("Dinstall::KeyAutoFetch")
# (elided guard) presumably "if autofetch:" precedes the retrieve_key call,
# and an "if error_msg: reject(...); return None" follows it
1316 error_msg = retrieve_key(sig_filename)
1321 # Build the command line
1322 status_read, status_write = os.pipe()
1323 cmd = "gpgv --status-fd %s %s %s %s" % (
1324 status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)
1326 # Invoke gpgv on the file
1327 (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)
1329 # Process the status-fd output
1330 (keywords, internal_error) = process_gpgv_output(status)
1332 # If we failed to parse the status-fd output, let's just whine and bail now
# (elided guard) presumably "if internal_error:" wraps these three rejects
1334 reject("internal error while performing signature check on %s." % (sig_filename))
1335 reject(internal_error, "")
1336 reject("Please report the above errors to the Archive maintainers by replying to this mail.", "")
1340 # Now check for obviously bad things in the processed output
# Each branch below maps one gpgv status token to a human-readable rejection;
# an elided "bad = True" (or similar) flag presumably accompanies each.
1341 if keywords.has_key("KEYREVOKED"):
1342 reject("The key used to sign %s has been revoked." % (sig_filename))
1344 if keywords.has_key("BADSIG"):
1345 reject("bad signature on %s." % (sig_filename))
1347 if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
1348 reject("failed to check signature on %s." % (sig_filename))
1350 if keywords.has_key("NO_PUBKEY"):
1351 args = keywords["NO_PUBKEY"]
# (elided) 'key' is presumably extracted from args between these lines
1354 reject("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
1356 if keywords.has_key("BADARMOR"):
1357 reject("ASCII armour of signature was corrupt in %s." % (sig_filename))
1359 if keywords.has_key("NODATA"):
1360 reject("no signature found in %s." % (sig_filename))
1362 if keywords.has_key("EXPKEYSIG"):
1363 args = keywords["EXPKEYSIG"]
1366 reject("Signature made by expired key 0x%s" % (key))
# KEYEXPIRED without GOODSIG: the signing key itself has expired.
1368 if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
1369 args = keywords["KEYEXPIRED"]
# Timestamp without a "T" is a raw epoch value; format it as YYYY-MM-DD.
# ISO-style timestamps (containing "T") are passed through as-is (line 1379).
1373 if timestamp.count("T") == 0:
1375 expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
# (elided) this fallback presumably sits in an except/else for bad floats
1377 expiredate = "unknown (%s)" % (timestamp)
1379 expiredate = timestamp
1380 reject("The key used to sign %s has expired on %s" % (sig_filename, expiredate))
1386 # Next check gpgv exited with a zero return code
# (elided guard) presumably "if exit_status:" wraps these rejects
1388 reject("gpgv failed while checking %s." % (sig_filename))
1390 reject(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
1392 reject(prefix_multi_line_string(output, " [GPG output:] "), "")
1395 # Sanity check the good stuff we expect
1396 if not keywords.has_key("VALIDSIG"):
1397 reject("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
1400 args = keywords["VALIDSIG"]
# (elided guard) presumably a length check on args precedes this reject
1402 reject("internal error while checking signature on %s." % (sig_filename))
# First VALIDSIG argument is the full key fingerprint.
1405 fingerprint = args[0]
1406 if not keywords.has_key("GOODSIG"):
1407 reject("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
1409 if not keywords.has_key("SIG_ID"):
1410 reject("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))
1413 # Finally ensure there's not something we don't recognise
# Whitelist of gpgv status tokens this function knows how to interpret;
# anything else is surfaced to the maintainers rather than silently ignored.
1414 known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
1415 SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
1416 NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")
1418 for keyword in keywords.keys():
1419 if not known_keywords.has_key(keyword):
1420 reject("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
1428 ################################################################################
1430 def gpg_get_key_addresses(fingerprint):
1431 """Retrieve email addresses from gpg key uids for a given fingerprint."""
# Results are memoized per fingerprint in the module-level
# key_uid_email_cache dict; a cache hit presumably returns immediately
# (the return on the elided line 1434 is not visible here).
1432 addresses = key_uid_email_cache.get(fingerprint)
1433 if addresses != None:
# (elided) presumably "addresses = set()" precedes the gpg invocation
1436 cmd = "gpg --no-default-keyring %s --fingerprint %s" \
1437 % (gpg_keyring_args(), fingerprint)
1438 (result, output) = commands.getstatusoutput(cmd)
# (elided guard) presumably "if result == 0:" wraps the parsing loop
1440 for l in output.split('\n'):
# re_gpg_uid extracts the email address from each matching uid line
1441 m = re_gpg_uid.match(l)
# (elided guard) presumably "if m:" precedes this add
1443 addresses.add(m.group(1))
1444 key_uid_email_cache[fingerprint] = addresses
1447 ################################################################################
1449 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
1451 def wrap(paragraph, max_length, prefix=""):
# Word-wrap 'paragraph' to lines of at most 'max_length' characters,
# prepending 'prefix' to continuation lines.
# NOTE(review): this listing is elided -- the initialization of 's'/'line',
# the loop header over 'words', and the tail of the function (flushing the
# final line and the return) are not visible here; the comments below only
# describe the visible fragment.
1455 words = paragraph.split()
1458 word_size = len(word)
# A single word longer than max_length gets its own line rather than
# being split mid-word.
1459 if word_size > max_length:
# (elided guard) presumably "if line:" precedes flushing the partial line
1461 s += line + '\n' + prefix
1462 s += word + '\n' + prefix
# +1 accounts for the separating space before the candidate word.
1465 new_length = len(line) + word_size + 1
1466 if new_length > max_length:
1467 s += line + '\n' + prefix
1480 ################################################################################
1482 def clean_symlink (src, dest, root):
# NOTE(review): the docstring's triple-quote delimiters are elided from
# this listing; line 1484 is the docstring body.
1484 Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
# Strip the leading 'root' component from both paths (first occurrence only).
1487 src = src.replace(root, '', 1)
1488 dest = dest.replace(root, '', 1)
1489 dest = os.path.dirname(dest)
# One "../" per directory component of the link's location, so the
# resulting relative path climbs back to the stripped root.
1490 new_src = '../' * len(dest.split('/'))
1491 return new_src + src
1493 ################################################################################
1495 def temp_filename(directory=None, prefix="dak", suffix=""):
# Thin wrapper around tempfile.mkstemp. NOTE(review): the docstring's
# triple-quote delimiters are elided from this listing.
1497 Return a secure and unique filename by pre-creating it.
1498 If 'directory' is non-null, it will be the directory the file is pre-created in.
1499 If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
1500 If 'suffix' is non-null, the filename will end with it.
1502 Returns a pair (fd, name).
# mkstemp's argument order is (suffix, prefix, dir) -- intentionally
# different from this function's parameter order.
1505 return tempfile.mkstemp(suffix, prefix, directory)
1507 ################################################################################
1509 def temp_dirname(parent=None, prefix="dak", suffix=""):
# Thin wrapper around tempfile.mkdtemp. NOTE(review): the docstring's
# triple-quote delimiters are elided from this listing.
1511 Return a secure and unique directory by pre-creating it.
1512 If 'parent' is non-null, it will be the directory the directory is pre-created in.
1513 If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
1514 If 'suffix' is non-null, the filename will end with it.
1516 Returns a pathname to the new directory
# mkdtemp's argument order is (suffix, prefix, dir).
1519 return tempfile.mkdtemp(suffix, prefix, parent)
1521 ################################################################################
1523 def is_email_alias(email):
1524 """ checks if the user part of the email is listed in the alias file """
# Lazily populates the module-level alias_cache with the local parts
# (text before ':') of each line in the alias file, then tests membership.
# NOTE(review): elided lines here presumably include "global alias_cache",
# a check of aliasfn, and "alias_cache = set()" -- confirm in full source.
1526 if alias_cache == None:
1527 aliasfn = which_alias_file()
1530 for l in open(aliasfn):
1531 alias_cache.add(l.split(':')[0])
# Local part of the address (everything before '@') is the lookup key.
1532 uid = email.split('@')[0]
1533 return uid in alias_cache
1535 ################################################################################
1537 def get_changes_files(dir):
# NOTE(review): the docstring delimiters and the try/os.chdir/except lines
# are elided from this listing -- line 1550's fubar() is presumably the
# except handler for the chdir/listdir attempt.
1539 Takes a directory and lists all .changes files in it (as well as chdir'ing
1540 to the directory; this is due to broken behaviour on the part of p-u/p-a
1541 when you're not in the right place)
1543 Returns a list of filenames
1546 # Much of the rest of p-u/p-a depends on being in the right place
1548 changes_files = [x for x in os.listdir(dir) if x.endswith('.changes')]
# fubar() aborts with an error message on failure to read the directory
1550 fubar("Failed to read list from directory %s (%s)" % (dir, e))
1552 return changes_files
1554 ################################################################################
# Module-level initialisation: load the default dak configuration, then
# overlay a host-specific config file if which_conf_file() points elsewhere.
# NOTE(review): an apt_pkg.init() call presumably precedes this in the
# elided lines -- confirm against the full source.
1558 Cnf = apt_pkg.newConfiguration()
1559 apt_pkg.ReadConfigFileISC(Cnf,default_config)
1561 if which_conf_file() != default_config:
1562 apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1564 ###############################################################################