2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
42 import email as modemail
43 from dak_exceptions import *
44 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
45 re_multi_line_field, re_srchasver, re_verwithext, \
46 re_parse_maintainer, re_taint_free, re_gpg_uid, re_re_mark, \
49 ################################################################################
# Host-wide defaults.  which_conf_file()/which_apt_conf_file() may override
# these per host via the Cnf "Config::<hostname>::..." tree.
# BUG FIX: a developer's personal path ("/home/stew/etc/dak/dak.conf") had
# been committed as the default while the real default was commented out;
# restore the canonical system path.
default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used

alias_cache = None #: Cache for email alias checks
key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
# (hashname, function, earliest_changes_version)
# The third element is the first .changes "Format:" version that may carry
# this checksum field; older formats get the hash computed locally instead.
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
                ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
62 ################################################################################
65 """ Escape html chars """
66 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
68 ################################################################################
def open_file(filename, mode='r'):
    """
    Open C{file}, return fileobject.

    @type filename: string
    @param filename: path/filename to open

    @type mode: string
    @param mode: open mode

    @rtype: fileobject
    @return: open fileobject

    @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
    """
    f = open(filename, mode)
    # NOTE(review): the enclosing try/except IOError and the final
    # "return f" appear to be missing from this copy -- the raise below
    # belongs inside the IOError handler.
        raise CantOpenError, filename
92 ################################################################################
def our_raw_input(prompt=""):
    """raw_input() wrapper: show C{prompt}, read a line, handle EOF cleanly."""
    sys.stdout.write(prompt)
    # NOTE(review): the raw_input() call and its EOFError handler appear to
    # be missing from this copy; the message below belongs in that handler.
    sys.stderr.write("\nUser interrupt (^D).\n")
105 ################################################################################
def extract_component_from_section(section):
    """Split a Section value like "contrib/net" into (section, component)."""
    # An explicit "component/section" value carries the component up front.
    if section.find('/') != -1:
        component = section.split('/')[0]

    # Expand default component
    # NOTE(review): the body of this branch (and the fallback to "main")
    # appears to be missing from this copy of the file.
    if Cnf.has_key("Component::%s" % section):

    return (section, component)
122 ################################################################################
def parse_deb822(contents, signing_rules=0):
    """
    Parse deb822-style data (the body of a .changes/.dsc) into a dict of
    lower-cased field names; see parse_changes() for signing_rules.
    NOTE(review): several lines of this function (initialisers, loop
    headers, returns) are missing from this copy of the file.
    """
    # Split the lines in the input, keeping the linebreaks.
    lines = contents.splitlines(True)
    # NOTE(review): the empty-input guard is missing; this raise belongs
    # under it.
        raise ParseChangesError, "[Empty changes file]"

    # Reindex by line number so we can easily verify the format of
    # any .dsc file (the loop filling indexed_lines is partly missing).
        indexed_lines[index] = line[:-1]

    num_of_lines = len(indexed_lines.keys())
    while index < num_of_lines:
        line = indexed_lines[index]
        # Strict (.dsc) mode: after the signed data the very next line must
        # start the PGP signature block.
        if signing_rules == 1:
            if index > num_of_lines:
                raise InvalidDscError, index
            line = indexed_lines[index]
            if not line.startswith("-----BEGIN PGP SIGNATURE"):
                raise InvalidDscError, index
        if line.startswith("-----BEGIN PGP SIGNATURE"):
        if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
            if signing_rules == 1:
                # Skip the PGP armor header block up to its blank line.
                while index < num_of_lines and line != "":
                    line = indexed_lines[index]
        # If we're not inside the signed data, don't process anything
        if signing_rules >= 0 and not inside_signature:
        # Single-line "Field: value" entries.
        slf = re_single_line_field.match(line)
            field = slf.groups()[0].lower()
            changes[field] = slf.groups()[1]
                changes[field] += '\n'
        # Continuation lines of a multi-line field.
        mlf = re_multi_line_field.match(line)
                raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
            if first == 1 and changes[field] != "":
                changes[field] += '\n'
            changes[field] += mlf.groups()[0] + '\n'

    if signing_rules == 1 and inside_signature:
        raise InvalidDscError, index

    changes["filecontents"] = "".join(lines)

    if changes.has_key("source"):
        # Strip the source version in brackets from the source field,
        # put it in the "source-version" field instead.
        srcver = re_srchasver.search(changes["source"])
            changes["source"] = srcver.group(1)
            changes["source-version"] = srcver.group(2)

    # NOTE(review): this raise was presumably guarded by an "if error:"
    # check that is missing from this copy.
        raise ParseChangesError, error
212 ################################################################################
def parse_changes(filename, signing_rules=0):
    """
    Parses a changes file and returns a dictionary where each field is a
    key. The mandatory first argument is the filename of the .changes
    file.

    signing_rules is an optional argument:

      - If signing_rules == -1, no signature is required.
      - If signing_rules == 0 (the default), a signature is required.
      - If signing_rules == 1, it turns on the same strict format checking
        as dpkg-source.

    The rules for (signing_rules == 1)-mode are:

      - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
        followed by any PGP header data and must end with a blank line.

      - The data section must end with a blank line and must be followed by
        "-----BEGIN PGP SIGNATURE-----".
    """
    changes_in = open_file(filename)
    content = changes_in.read()
    # NOTE(review): the file close and the try/except around the utf-8
    # validity probe appear to be missing from this copy.
        unicode(content, 'utf-8')
        raise ChangesUnicodeError, "Changes file not proper utf-8"
    return parse_deb822(content, signing_rules)
245 ################################################################################
def hash_key(hashname):
    """Return the files-dict key under which the named hash is stored."""
    return hashname + "sum"
250 ################################################################################
def create_hash(where, files, hashname, hashfunc):
    """
    create_hash extends the passed files dict with the given hash by
    iterating over all files on disk and passing them to the hashing
    function given.  Returns a list of rejection messages.
    """
    # NOTE(review): rejmsg initialisation, the try:, the continue after a
    # failed open and the final return are missing from this copy.
    for f in files.keys():
            file_handle = open_file(f)
        except CantOpenError:
            rejmsg.append("Could not open file %s for checksumming" % (f))
        files[f][hash_key(hashname)] = hashfunc(file_handle)
272 ################################################################################
def check_hash(where, files, hashname, hashfunc):
    """
    check_hash checks the given hash in the files dict against the actual
    files on disk. The hash values need to be present consistently in
    all file entries. It does not modify its input in any way.
    """
    # NOTE(review): rejmsg initialisation, the try:, the file close and
    # the return are missing from this copy of the function.
    for f in files.keys():
            file_handle = open_file(f)

            # Check for the hash entry, to not trigger a KeyError.
            if not files[f].has_key(hash_key(hashname)):
                rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,

            # Actually check the hash for correctness.
            if hashfunc(file_handle) != files[f][hash_key(hashname)]:
                rejmsg.append("%s: %s check failed in %s" % (f, hashname,
        except CantOpenError:
            # TODO: This happens when the file is in the pool.
            # warn("Cannot open file %s" % f)
307 ################################################################################
def check_size(where, files):
    """
    check_size checks the file sizes in the passed files dict against the
    files on disk; returns a list of rejection messages.
    """
    # NOTE(review): rejmsg initialisation, the os.stat() call assigning
    # "entry" and its error handling are missing from this copy.
    for f in files.keys():
            # TODO: This happens when the file is in the pool.
        actual_size = entry[stat.ST_SIZE]
        size = int(files[f]["size"])
        if size != actual_size:
            rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
                          % (f, actual_size, size, where))
332 ################################################################################
def check_hash_fields(what, manifest):
    """
    check_hash_fields ensures that there are no checksum fields in the
    given dict that we do not know about.
    """
    # NOTE(review): rejmsg initialisation and the final return are missing
    # from this copy of the function.
    hashes = map(lambda x: x[0], known_hashes)
    for field in manifest:
        # Only Checksums-* fields are subject to the whitelist.
        if field.startswith("checksums-"):
            hashname = field.split("-",1)[1]
            if hashname not in hashes:
                rejmsg.append("Unsupported checksum field for %s "\
                    "in %s" % (hashname, what))
350 ################################################################################
def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
    """
    Ensure the given hash is available for every entry in C{files}: parse
    it from the .changes when the Format version carries it, otherwise
    compute it.  Returns a list of rejection messages.
    NOTE(review): the else-branch and the assignment of "func" are missing
    from this copy.
    """
    if format >= version:
        # The version should contain the specified hash.

        # Import hashes from the changes
        rejmsg = parse_checksums(".changes", files, changes, hashname)
        # We need to calculate the hash because it can't possibly
    return func(".changes", files, hashname, hashfunc)

# We could add the orig which might be in the pool to the files dict to
# access the checksums easily.
def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
    """
    ensure_dsc_hashes' task is to ensure that each and every *present* hash
    in the dsc is correct, i.e. identical to the changes file and if necessary
    the pool. The latter task is delegated to check_hash.
    """
    # NOTE(review): rejmsg initialisation, the early return for an absent
    # Checksums-* field and the final return are missing from this copy.
    if not dsc.has_key('Checksums-%s' % (hashname,)):
    # Import hashes from the dsc
    parse_checksums(".dsc", dsc_files, dsc, hashname)
    rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
386 ################################################################################
def ensure_hashes(changes, dsc, files, dsc_files):
    """
    Validate/compute all known hashes for the .changes and (for source
    uploads) the .dsc.  Returns a list of rejection messages.
    """
    # Make sure we recognise the format of the Files: field in the .changes
    format = changes.get("format", "0.0").split(".", 1)
    # NOTE(review): the try/except around these conversions is missing
    # from this copy (the second line is the fallback path).
        format = int(format[0]), int(format[1])
        format = int(float(format[0])), 0

    # We need to deal with the original changes blob, as the fields we need
    # might not be in the changes dict serialised into the .dak anymore.
    orig_changes = parse_deb822(changes['filecontents'])

    # Copy the checksums over to the current changes dict. This will keep
    # the existing modifications to it intact.
    for field in orig_changes:
        if field.startswith('checksums-'):
            changes[field] = orig_changes[field]

    # Check for unsupported hashes
    rejmsg.extend(check_hash_fields(".changes", changes))
    rejmsg.extend(check_hash_fields(".dsc", dsc))

    # We have to calculate the hash if we have an earlier changes version than
    # the hash appears in rather than require it exist in the changes file
    for hashname, hashfunc, version in known_hashes:
        rejmsg.extend(_ensure_changes_hash(changes, format, version, files,
        if "source" in changes["architecture"]:
            rejmsg.extend(_ensure_dsc_hash(dsc, dsc_files, hashname,
def parse_checksums(where, files, manifest, hashname):
    """
    Copy the Checksums-<hashname> entries of C{manifest} into C{files},
    cross-checking sizes; returns a list of rejection messages.
    """
    # NOTE(review): rejmsg initialisation, the early return and the
    # continue statements are missing from this copy.
    field = 'checksums-%s' % hashname
    if not field in manifest:
    for line in manifest[field].split('\n'):
        checksum, size, checkfile = line.strip().split(' ')
        if not files.has_key(checkfile):
            # TODO: check for the file's entry in the original files dict, not
            # the one modified by (auto)byhand and other weird stuff
            # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
            # (file, hashname, where))
        if not files[checkfile]["size"] == size:
            rejmsg.append("%s: size differs for files and checksums-%s entry "\
                "in %s" % (checkfile, hashname, where))
        files[checkfile][hash_key(hashname)] = checksum
    for f in files.keys():
        if not files[f].has_key(hash_key(hashname)):
            # NOTE(review): "checkfile" here looks like it should be "f" --
            # it reports the last parsed line's file, not the missing one.
            rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
449 ################################################################################
451 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
    """
    Parse the Files:/Checksums-* field of a parsed .changes/.dsc into a
    per-filename dict (size, section, priority, component, hash).
    NOTE(review): the is_a_dsc/else branch markers, the files initialiser
    and the final return are missing from this copy.
    """
    # Make sure we have a Files: field to parse...
    if not changes.has_key(field):
        raise NoFilesFieldError

    # Make sure we recognise the format of the Files: field
    format = re_verwithext.search(changes.get("format", "0.0"))
        raise UnknownFormatError, "%s" % (changes.get("format","0.0"))

    # Normalise the matched version into an (major, minor, ext) tuple.
    format = format.groups()
    if format[1] == None:
        format = int(float(format[0])), 0, format[2]
        format = int(format[0]), int(format[1]), format[2]
    if format[2] == None:

    # format = (1,0) are the only formats we currently accept,
    # format = (0,0) are missing format headers of which we still
    # have some in the archive.
    if format != (1,0) and format != (0,0):
        raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
        if (format < (1,5) or format > (1,8)):
            raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
        if field != "files" and format < (1,8):
            raise UnknownFormatError, "%s" % (changes.get("format","0.0"))

    # Only .changes Files: lines carry section/priority columns.
    includes_section = (not is_a_dsc) and field == "files"

    # Parse each entry/line:
    for i in changes[field].split('\n'):
        section = priority = ""
            (md5, size, section, priority, name) = s
            (md5, size, name) = s
            raise ParseChangesError, i
        (section, component) = extract_component_from_section(section)
        files[name] = Dict(size=size, section=section,
                           priority=priority, component=component)
        files[name][hashname] = md5
514 ################################################################################
def force_to_utf8(s):
    """
    Forces a string to UTF-8. If the string isn't already UTF-8,
    it's assumed to be ISO-8859-1.
    """
    # NOTE(review): the try: unicode(s, 'utf-8') / return s fast path is
    # missing from this copy; the lines below are the latin-1 fallback.
    latin1_s = unicode(s,'iso8859-1')
    return latin1_s.encode('utf-8')
def rfc2047_encode(s):
    """
    Encodes a (header) string per RFC2047 if necessary. If the
    string is neither ASCII nor UTF-8, it's assumed to be ISO-8859-1.
    """
    # NOTE(review): the try/except structure and the return statements are
    # missing from this copy.  Intent per the lines below: plain ASCII is
    # returned untouched; valid UTF-8 is RFC2047-encoded as utf-8; anything
    # else is encoded as iso-8859-1.
    codecs.lookup('ascii')[1](s)
    codecs.lookup('utf-8')[1](s)
    h = email.Header.Header(s, 'utf-8', 998)
    h = email.Header.Header(s, 'iso-8859-1', 998)
546 ################################################################################
548 # <Culus> 'The standard sucks, but my tool is supposed to interoperate
549 # with it. I know - I'll fix the suckage and make things
def fix_maintainer (maintainer):
    """
    Parses a Maintainer or Changed-By field and returns:
      1. an RFC822 compatible version,
      2. an RFC2047 compatible version,
      3. the name, and
      4. the email address.

    The name is forced to UTF-8 for both 1. and 3.. If the name field
    contains '.' or ',' (as allowed by Debian policy), 1. and 2. are
    switched to 'email (name)' format.
    """
    # NOTE(review): the empty-string guard, the bare-email branches and the
    # "m.group(1/2)" assignments of name/email are missing from this copy.
    maintainer = maintainer.strip()
        return ('', '', '', '')

    if maintainer.find("<") == -1:
    elif (maintainer[0] == "<" and maintainer[-1:] == ">"):
        email = maintainer[1:-1]
        m = re_parse_maintainer.match(maintainer)
            raise ParseMaintError, "Doesn't parse as a valid Maintainer field."

    # Get an RFC2047 compliant version of the name
    rfc2047_name = rfc2047_encode(name)

    # Force the name to be UTF-8
    name = force_to_utf8(name)

    # Debian policy: a name containing '.' or ',' must be rendered as
    # "email (name)" so the address still parses.
    if name.find(',') != -1 or name.find('.') != -1:
        rfc822_maint = "%s (%s)" % (email, name)
        rfc2047_maint = "%s (%s)" % (email, rfc2047_name)
        rfc822_maint = "%s <%s>" % (name, email)
        rfc2047_maint = "%s <%s>" % (rfc2047_name, email)

    # buildd_* pseudo-addresses are the only ones allowed without an '@'.
    if email.find("@") == -1 and email.find("buildd_") != 0:
        raise ParseMaintError, "No @ found in email address part."

    return (rfc822_maint, rfc2047_maint, name, email)
600 ################################################################################
def send_mail (message, filename=""):
    """sendmail wrapper, takes _either_ a message string or a file as arguments"""
    # NOTE(review): several control-flow lines (loop headers, whitelist
    # matching, fd closes, temp-file cleanup) are missing from this copy.

    # If we've been passed a string dump it into a temporary file
        (fd, filename) = tempfile.mkstemp()
        os.write (fd, message)

    # Optionally filter recipients against the configured whitelist.
    if Cnf.has_key("Dinstall::MailWhiteList") and \
           Cnf["Dinstall::MailWhiteList"] != "":
        message_in = open_file(filename)
        message_raw = modemail.message_from_file(message_in)

        # Whitelist entries are either regexes (re_re_mark prefix) or
        # literal addresses.
        whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
            for line in whitelist_in:
                if not re_whitespace_comment.match(line):
                    if re_re_mark.match(line):
                        whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
                        whitelist.append(re.compile(re.escape(line.strip())))

        # Walk the recipient headers and keep only whitelisted addresses.
        fields = ["To", "Bcc", "Cc"]
            value = message_raw.get(field, None)
                for item in value.split(","):
                    (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
                    if not mail_whitelisted:
                        print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])

            # Doesn't have any mail in whitelist so remove the header
                del message_raw[field]
                message_raw.replace_header(field, string.join(match, ", "))

        # Change message fields in order if we don't have a To header
        if not message_raw.has_key("To"):
            if message_raw.has_key(field):
                message_raw[fields[-1]] = message_raw[field]
                del message_raw[field]
                # Clean up any temporary files
                # and return, as we removed all recipients.
                os.unlink (filename);

        # Rewrite the (possibly filtered) message back to the file.
        fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
        os.write (fd, message_raw.as_string(True));

    # Invoke sendmail
    (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
        raise SendmailFailedError, output

    # Clean up any temporary files
678 # Clean up any temporary files
682 ################################################################################
def poolify (source, component):
    """
    Return the pool subdirectory for C{source} within C{component}:
    "lib*" packages are pooled under their first four characters, all
    others under the first character.
    NOTE(review): the handling that appends '/' to a non-empty component
    appears to be missing from this copy.
    """
    if source[:3] == "lib":
        return component + source[:4] + '/' + source + '/'
    return component + source[:1] + '/' + source + '/'
692 ################################################################################
def move (src, dest, overwrite = 0, perms = 0664):
    """
    Move C{src} to C{dest} (copy + chmod; the unlink of src appears to be
    missing from this copy, as are the dest-dir adjustment, the umask
    restore and the overwrite guard's "if not overwrite:" line).
    """
    if os.path.exists(dest) and os.path.isdir(dest):
    dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        # Create missing parents group-writable regardless of caller umask.
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Moving %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
            fubar("Can't move %s to %s - file already exists." % (src, dest))
            if not os.access(dest, os.W_OK):
                fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
def copy (src, dest, overwrite = 0, perms = 0664):
    """
    Copy C{src} to C{dest} with mode C{perms}; raises FileExistsError /
    CantOverwriteError instead of fubar()-ing like move().
    NOTE(review): the dest-dir adjustment, umask restore and the
    "if not overwrite:" guard line are missing from this copy.
    """
    if os.path.exists(dest) and os.path.isdir(dest):
    dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        # Create missing parents group-writable regardless of caller umask.
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Copying %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
            raise FileExistsError
            if not os.access(dest, os.W_OK):
                raise CantOverwriteError
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
739 ################################################################################
742 res = socket.gethostbyaddr(socket.gethostname())
743 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
744 if database_hostname:
745 return database_hostname
def which_conf_file ():
    """
    Return the dak config file path for this host: a per-user config (when
    the host's AllowLocalConfig flag is set), then the per-host DakConfig
    entry, falling back to default_config.
    """
    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # BUG FIX: os.path.join() discards earlier components when a later
        # one is absolute, so joining with "/etc/dak.conf" always produced
        # "/etc/dak.conf" instead of the user's own $HOME/etc/dak.conf.
        confpath = os.path.join(homedir, "etc/dak.conf")
        if os.path.exists(confpath):
            # NOTE(review): reading default_config (not confpath) here looks
            # suspect, but is kept as-is -- confirm intent before changing.
            apt_pkg.ReadConfigFileISC(Cnf,default_config)

    # We are still in here, so there is no local config file or we do
    # not allow local files. Do the normal stuff.
    if Cnf.get("Config::" + res[0] + "::DakConfig"):
        return Cnf["Config::" + res[0] + "::DakConfig"]

    return default_config
def which_apt_conf_file ():
    """
    Return the apt config file path for this host, mirroring
    which_conf_file(): per-user config, per-host AptConfig entry, then
    default_apt_config.
    """
    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # BUG FIX: os.path.join() discards earlier components when a later
        # one is absolute; use a relative component so the user's
        # $HOME/etc/dak.conf is actually consulted.
        confpath = os.path.join(homedir, "etc/dak.conf")
        if os.path.exists(confpath):
            # NOTE(review): reading default_config (not confpath) here looks
            # suspect, but is kept as-is -- confirm intent before changing.
            apt_pkg.ReadConfigFileISC(Cnf,default_config)

    if Cnf.get("Config::" + res[0] + "::AptConfig"):
        return Cnf["Config::" + res[0] + "::AptConfig"]

    return default_apt_config
def which_alias_file():
    """Return this host's forward-alias file path, if it exists."""
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
    # NOTE(review): the return statements (aliasfn / None) are missing
    # from this copy of the function.
    if os.path.exists(aliasfn):
787 ################################################################################
789 # Escape characters which have meaning to SQL's regex comparison operator ('~')
790 # (woefully incomplete)
793 s = s.replace('+', '\\\\+')
794 s = s.replace('.', '\\\\.')
797 ################################################################################
def TemplateSubst(map, filename):
    """ Perform a substitution of template: replace each key of C{map}
    found in the template file with its value. """
    templatefile = open_file(filename)
    template = templatefile.read()
    # NOTE(review): the loop header over map's keys, the file close and
    # the return are missing from this copy.
        template = template.replace(x,map[x])
808 ################################################################################
def fubar(msg, exit_code=1):
    """Print a fatal error to stderr; the sys.exit(exit_code) call appears
    to be missing from this copy."""
    sys.stderr.write("E: %s\n" % (msg))
815 sys.stderr.write("W: %s\n" % (msg))
817 ################################################################################
819 # Returns the user name with a laughable attempt at rfc822 conformancy
820 # (read: removing stray periods).
822 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
825 return pwd.getpwuid(os.getuid())[0]
827 ################################################################################
837 return ("%d%s" % (c, t))
839 ################################################################################
def cc_fix_changes (changes):
    """
    Convert the space-separated "architecture" field into a dict keyed by
    architecture name (value 1), for set-style membership tests.
    NOTE(review): the guard and the loop header over the split field are
    missing from this copy.
    """
    o = changes.get("architecture", "")
        del changes["architecture"]
    changes["architecture"] = {}
        changes["architecture"][j] = 1
def changes_compare (a, b):
    """ Sort by source name, source version, 'have source', and then by filename """
    # NOTE(review): the try/except wrappers around parse_changes, the
    # "if q: return q" short-circuits, the return values of the
    # have-source branches and the final filename cmp are missing from
    # this copy of the function.
    a_changes = parse_changes(a)
    b_changes = parse_changes(b)

    # Normalise the architecture fields for membership tests below.
    cc_fix_changes (a_changes)
    cc_fix_changes (b_changes)

    # Sort by source name
    a_source = a_changes.get("source")
    b_source = b_changes.get("source")
    q = cmp (a_source, b_source)

    # Sort by source version
    a_version = a_changes.get("version", "0")
    b_version = b_changes.get("version", "0")
    q = apt_pkg.VersionCompare(a_version, b_version)

    # Sort by 'have source'
    a_has_source = a_changes["architecture"].get("source")
    b_has_source = b_changes["architecture"].get("source")
    if a_has_source and not b_has_source:
    elif b_has_source and not a_has_source:

    # Fall back to sort by filename
889 ################################################################################
def find_next_free (dest, too_many=100):
    """
    Return a non-existing variant of C{dest} by appending '.<n>' suffixes,
    giving up after C{too_many} attempts.
    NOTE(review): the counter initialisation, orig_dest assignment, the
    increment and the final return are missing from this copy.
    """
    while os.path.exists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
    if extra >= too_many:
        raise NoFreeFilenameError
901 ################################################################################
def result_join (original, sep = '\t'):
    """
    Join a sequence with C{sep}, rendering None entries as empty strings.
    NOTE(review): the resultlist initialisation and the else: line are
    missing from this copy.
    """
    for i in xrange(len(original)):
        if original[i] == None:
            resultlist.append("")
            resultlist.append(original[i])
    return sep.join(resultlist)
912 ################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    """
    Prefix each (non-blank, unless include_blank_lines) line of C{str}
    with C{prefix}.
    NOTE(review): the accumulator initialisation, the strip of each line,
    and the trailing-newline strip + return are missing from this copy.
    """
    for line in str.split('\n'):
        if line or include_blank_lines:
            out += "%s%s\n" % (prefix, line)
    # Strip trailing new line
def validate_changes_file_arg(filename, require_changes=1):
    """
    'filename' is either a .changes or .dak file. If 'filename' is a
    .dak file, it's changed to be the corresponding .changes file. The
    function then checks if the .changes file a) exists and b) is
    readable and returns the .changes filename if so. If there's a
    problem, the next action depends on the option 'require_changes'
    (optional).

    - If 'require_changes' == -1, errors are ignored and the .changes
      filename is returned.
    - If 'require_changes' == 0, a warning is given and 'None' is returned.
    - If 'require_changes' == 1, a fatal error is raised.
    """
    # NOTE(review): the error initialisation, the else: branch markers and
    # the return statements are missing from this copy.
    orig_filename = filename
    if filename.endswith(".dak"):
        filename = filename[:-4]+".changes"

    if not filename.endswith(".changes"):
        error = "invalid file type; not a changes file"
        if not os.access(filename,os.R_OK):
            if os.path.exists(filename):
                error = "permission denied"
                error = "file not found"

        if require_changes == 1:
            fubar("%s: %s." % (orig_filename, error))
        elif require_changes == 0:
            warn("Skipping %s - %s" % (orig_filename, error))
        else: # We only care about the .dak file
968 ################################################################################
971 return (arch != "source" and arch != "all")
973 ################################################################################
def join_with_commas_and(list):
    """Render a sequence as English prose: "a, b and c" ("nothing" when empty)."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    head, last = list[:-1], list[-1]
    return ", ".join(head) + " and " + last
980 ################################################################################
985 (pkg, version, constraint) = atom
987 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
990 pp_deps.append(pp_dep)
991 return " |".join(pp_deps)
993 ################################################################################
998 ################################################################################
def parse_args(Options):
    """ Handle -a, -c and -s arguments; returns them as SQL constraints """
    # NOTE(review): several initialisers ("", []), branch markers
    # (else:, if suite == "*":) and the unknown-id guards are missing from
    # this copy of the function.

    # Process suite
    if Options["Suite"]:
        for suite in split_args(Options["Suite"]):
            suite_id = database.get_suite_id(suite)
                warn("suite '%s' not recognised." % (suite))
                suite_ids_list.append(suite_id)
            con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
            fubar("No valid suite given.")

    # Process component
    if Options["Component"]:
        component_ids_list = []
        for component in split_args(Options["Component"]):
            component_id = database.get_component_id(component)
            if component_id == -1:
                warn("component '%s' not recognised." % (component))
                component_ids_list.append(component_id)
        if component_ids_list:
            con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
            fubar("No valid component given.")

    # Process architecture
    con_architectures = ""
    if Options["Architecture"]:
        for architecture in split_args(Options["Architecture"]):
            # "source" is handled separately via check_source.
            if architecture == "source":
                architecture_id = database.get_architecture_id(architecture)
                if architecture_id == -1:
                    warn("architecture '%s' not recognised." % (architecture))
                arch_ids_list.append(architecture_id)
            con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
        if not check_source:
            fubar("No valid architecture given.")

    return (con_suites, con_architectures, con_components, check_source)
1058 ################################################################################
1060 # Inspired(tm) by Bryn Keller's print_exc_plus (See
1061 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
1064 tb = sys.exc_info()[2]
1071 frame = frame.f_back
1073 traceback.print_exc()
1075 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
1076 frame.f_code.co_filename,
1078 for key, value in frame.f_locals.items():
1079 print "\t%20s = " % key,
1083 print "<unable to print>"
1085 ################################################################################
1087 def try_with_debug(function):
1095 ################################################################################
def arch_compare_sw (a, b):
    """
    Function for use in sorting lists of architectures.

    Sorts normally except that 'source' dominates all others.
    """
    # NOTE(review): the return statements (0 / -1 / 1 / cmp fallback) are
    # missing from this copy of the function.
    if a == "source" and b == "source":
def split_args (s, dwim=1):
    """
    Split command line arguments which can be separated by either commas
    or whitespace. If dwim is set, it will complain about string ending
    in comma since this usually means someone did 'dak ls -a i386, m68k
    foo' or something and the inevitable confusion resulting from 'm68k'
    being treated as an argument is undesirable.
    """
    # NOTE(review): the return statements (s.split() for the no-comma case,
    # s.split(",") otherwise) are missing from this copy.
    if s.find(",") == -1:
        if s[-1:] == "," and dwim:
            fubar("split_args: found trailing comma, spurious space maybe?")
1131 ################################################################################
def Dict(**dict):
    """Return the keyword arguments as a plain dictionary (dict literal helper)."""
    return dict
1135 ########################################
def gpgv_get_status_output(cmd, status_read, status_write):
    """
    Our very own version of commands.getouputstatus(), hacked to support
    gpgv's status fd.  Returns (output, status, exit_status).
    NOTE(review): the fork, several os.close() calls, the read loop
    bookkeeping and the fd bookkeeping around select() are missing from
    this copy.
    """
    cmd = ['/bin/sh', '-c', cmd]
    p2cread, p2cwrite = os.pipe()
    c2pread, c2pwrite = os.pipe()
    errout, errin = os.pipe()
        # Child: close every fd except the status fd, then exec the shell.
        for i in range(3, 256):
            if i != status_write:
    os.execvp(cmd[0], cmd)
    # Parent side: wire up our ends of the pipes.
    os.dup2(c2pread, c2pwrite)
    os.dup2(errout, errin)

    output = status = ""
        i, o, e = select.select([c2pwrite, errin, status_read], [], [])
            # NOTE(review): 8196 looks like a typo for 8192; harmless, it is
            # only a read buffer size.
            r = os.read(fd, 8196)
                more_data.append(fd)
                if fd == c2pwrite or fd == errin:
                elif fd == status_read:
                    fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))

    pid, exit_status = os.waitpid(pid, 0)

    os.close(status_write)
    os.close(status_read)

    return output, status, exit_status
1202 ################################################################################
def process_gpgv_output(status):
    """
    Parse gpgv's status-fd output into a {keyword: args} dict plus a string
    of internal-error descriptions (empty when everything parsed).
    NOTE(review): the keywords/internal_error initialisers, the continue
    statements and the args assignment are missing from this copy.
    """
    # Process the status-fd output
    for line in status.split('\n'):
        split = line.split()
            internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
        # These tokens may legitimately repeat; anything else repeating is
        # an error.
        if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
        keywords[keyword] = args
    return (keywords, internal_error)
1229 ################################################################################
def retrieve_key (filename, keyserver=None, keyring=None):
    """
    Retrieve the key that signed 'filename' from 'keyserver' and
    add it to 'keyring'. Returns nothing on success, or an error message
    on failure.
    NOTE(review): the "if not keyserver/keyring" guards, the
    internal_error check and the final returns are missing from this copy.
    """
    # Defaults for keyserver and keyring
        keyserver = Cnf["Dinstall::KeyServer"]
        keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(filename):
        return "%s: tainted filename" % (filename)

    # Invoke gpgv on the file
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
    (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)
        return internal_error

    # With an empty keyring gpgv must report the missing key.
    if not keywords.has_key("NO_PUBKEY"):
        return "didn't find expected NO_PUBKEY in gpgv status-fd output"

    fingerprint = keywords["NO_PUBKEY"][0]
    # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
    # it'll try to create a lockfile in /dev. A better solution might
    # be a tempfile or something.
    cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
          % (Cnf["Dinstall::SigningKeyring"])
    cmd += " --keyring %s --keyserver %s --recv-key %s" \
           % (keyring, keyserver, fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
        return "'%s' failed with exit code %s" % (cmd, result)
1275 ################################################################################
def gpg_keyring_args(keyrings=None):
    """
    Build the gpg/gpgv command-line arguments selecting keyrings.

    @type keyrings: list of str (or None)
    @param keyrings: keyring file paths; when not given, falls back to the
        configured Dinstall::GPGKeyring list

    @rtype: str
    @return: space-separated C{--keyring <path>} arguments
    """
    # Only fall back to the configured keyrings when the caller did not
    # supply an explicit list.  As listed, the parameter was overwritten
    # unconditionally, which made passing 'keyrings' a no-op.
    if not keyrings:
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    return " ".join(["--keyring %s" % x for x in keyrings])
1283 ################################################################################
1285 def check_signature (sig_filename, reject, data_filename="", keyrings=None, autofetch=None):
# NOTE(review): several original lines are missing from this listing
# (docstring delimiters, early returns, guards, and the final return of the
# fingerprint).  Comments below flag the visible gaps rather than guessing
# the missing code.
1287 Check the signature of a file and return the fingerprint if the
1288 signature is valid or 'None' if it's not. The first argument is the
1289 filename whose signature should be checked. The second argument is a
1290 reject function and is called when an error is found. The reject()
1291 function must allow for two arguments: the first is the error message,
1292 the second is an optional prefix string. It's possible for reject()
1293 to be called more than once during an invocation of check_signature().
1294 The third argument is optional and is the name of the files the
1295 detached signature applies to. The fourth argument is optional and is
1296 a *list* of keyrings to use. 'autofetch' can either be None, True or
1297 False. If None, the default behaviour specified in the config will be
1301 # Ensure the filename contains no shell meta-characters or other badness
# both filenames end up on a shell command line, hence the taint checks
1302 if not re_taint_free.match(sig_filename):
1303 reject("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
1306 if data_filename and not re_taint_free.match(data_filename):
1307 reject("!!WARNING!! tainted data filename: '%s'." % (data_filename))
# default to the configured keyrings when none were passed in (the guarding
# "if not keyrings:" line is missing from this listing)
1311 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1313 # Autofetch the signing key if that's enabled
1314 if autofetch == None:
1315 autofetch = Cnf.get("Dinstall::KeyAutoFetch")
# when autofetch is enabled, try to pull the signing key from the
# keyserver before verifying; any error aborts the check
1317 error_msg = retrieve_key(sig_filename)
1322 # Build the command line
1323 status_read, status_write = os.pipe()
1324 cmd = "gpgv --status-fd %s %s %s %s" % (
1325 status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)
1327 # Invoke gpgv on the file
1328 (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)
1330 # Process the status-fd output
1331 (keywords, internal_error) = process_gpgv_output(status)
1333 # If we failed to parse the status-fd output, let's just whine and bail now
1335 reject("internal error while performing signature check on %s." % (sig_filename))
1336 reject(internal_error, "")
1337 reject("Please report the above errors to the Archive maintainers by replying to this mail.", "")
1341 # Now check for obviously bad things in the processed output
1342 if keywords.has_key("KEYREVOKED"):
1343 reject("The key used to sign %s has been revoked." % (sig_filename))
1345 if keywords.has_key("BADSIG"):
1346 reject("bad signature on %s." % (sig_filename))
1348 if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
1349 reject("failed to check signature on %s." % (sig_filename))
1351 if keywords.has_key("NO_PUBKEY"):
1352 args = keywords["NO_PUBKEY"]
# 'key' is bound in a line missing from this listing — presumably args[0],
# the id of the missing key; TODO confirm against the full source
1355 reject("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
1357 if keywords.has_key("BADARMOR"):
1358 reject("ASCII armour of signature was corrupt in %s." % (sig_filename))
1360 if keywords.has_key("NODATA"):
1361 reject("no signature found in %s." % (sig_filename))
1363 if keywords.has_key("EXPKEYSIG"):
1364 args = keywords["EXPKEYSIG"]
# as above, 'key' is bound in a missing line before this reject()
1367 reject("Signature made by expired key 0x%s" % (key))
1369 if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
1370 args = keywords["KEYEXPIRED"]
# KEYEXPIRED carries an expiry timestamp: values without a 'T' are treated
# as epoch seconds and formatted; anything else is passed through verbatim
1374 if timestamp.count("T") == 0:
1376 expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
# unparsable epoch value (the guarding try/except is missing here)
1378 expiredate = "unknown (%s)" % (timestamp)
1380 expiredate = timestamp
1381 reject("The key used to sign %s has expired on %s" % (sig_filename, expiredate))
1387 # Next check gpgv exited with a zero return code
# include gpgv's own output in the rejection to make failures debuggable
1389 reject("gpgv failed while checking %s." % (sig_filename))
1391 reject(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
1393 reject(prefix_multi_line_string(output, " [GPG output:] "), "")
1396 # Sanity check the good stuff we expect
1397 if not keywords.has_key("VALIDSIG"):
1398 reject("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
1401 args = keywords["VALIDSIG"]
1403 reject("internal error while checking signature on %s." % (sig_filename))
# first VALIDSIG argument is the signing key's fingerprint
1406 fingerprint = args[0]
1407 if not keywords.has_key("GOODSIG"):
1408 reject("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
1410 if not keywords.has_key("SIG_ID"):
1411 reject("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))
1414 # Finally ensure there's not something we don't recognise
1415 known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
1416 SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
1417 NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")
# any status token not in the whitelist above is reported, so new gpgv
# behaviour surfaces loudly instead of being silently ignored
1419 for keyword in keywords.keys():
1420 if not known_keywords.has_key(keyword):
1421 reject("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
1429 ################################################################################
1431 def gpg_get_key_addresses(fingerprint):
1432 """Retrieve email addresses from gpg key uids for a given fingerprint."""
# consult the module-level memoization cache first
1433 addresses = key_uid_email_cache.get(fingerprint)
1434 if addresses != None:
# cache hit — the "return addresses" line is missing from this listing
1437 cmd = "gpg --no-default-keyring %s --fingerprint %s" \
1438 % (gpg_keyring_args(), fingerprint)
1439 (result, output) = commands.getstatusoutput(cmd)
# scan gpg's human-readable output line by line for uid entries, pulling
# the email address out with re_gpg_uid
# NOTE(review): the initialisation of 'addresses' (presumably a set) and
# the "if m:" guard are missing from this listing
1441 for l in output.split('\n'):
1442 m = re_gpg_uid.match(l)
1444 addresses.add(m.group(1))
# memoize the result for subsequent lookups of the same fingerprint
1445 key_uid_email_cache[fingerprint] = addresses
1448 ################################################################################
1450 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
1452 def wrap(paragraph, max_length, prefix=""):
# Word-wrap 'paragraph' at 'max_length' columns, prefixing each
# continuation line with 'prefix' (see the cookbook link above).
# NOTE(review): the initialisation of the accumulator ('s') and current
# line ('line'), the loop header over 'words', and the final return are
# missing from this listing; remaining lines annotated as-is.
1456 words = paragraph.split()
# inside a loop over 'words' (loop header missing here)
1459 word_size = len(word)
1460 if word_size > max_length:
# an over-long word gets flushed onto a line of its own
1462 s += line + '\n' + prefix
1463 s += word + '\n' + prefix
# +1 accounts for the space that would separate 'word' from 'line'
1466 new_length = len(line) + word_size + 1
1467 if new_length > max_length:
# current line is full: flush it and start a fresh one
1468 s += line + '\n' + prefix
1481 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
    Returns the fixed 'src' path.
    """
    # Strip the archive root from both endpoints (first occurrence only).
    stripped_src = src.replace(root, '', 1)
    target_dir = os.path.dirname(dest.replace(root, '', 1))
    # One "../" per directory component between the link and the root.
    depth = len(target_dir.split('/'))
    return '../' * depth + stripped_src
1494 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.

    @param directory: if non-null, the directory the file is pre-created in
    @param prefix: if non-null, the filename is prefixed with it (default "dak")
    @param suffix: if non-null, the filename ends with it

    @return: a pair (fd, name)
    """
    # Delegate entirely to tempfile; keyword form for readability.
    return tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=directory)
1508 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Return a secure and unique directory by pre-creating it.

    @param parent: if non-null, the directory the new directory is pre-created in
    @param prefix: if non-null, the directory name is prefixed with it (default "dak")
    @param suffix: if non-null, the directory name ends with it

    @return: the pathname of the new directory
    """
    # Delegate entirely to tempfile; keyword form for readability.
    return tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=parent)
1522 ################################################################################
1524 def is_email_alias(email):
1525 """ Checks if the user part of the email is listed in the alias file. """
# lazily populate the module-level alias_cache on first use
# NOTE(review): the "global alias_cache" statement, the set initialisation
# and the "if aliasfn:" guard are missing from this listing
1527 if alias_cache == None:
1528 aliasfn = which_alias_file()
# aliases file lines look like "name: target"; only the name matters here
1531 for l in open(aliasfn):
1532 alias_cache.add(l.split(':')[0])
# compare only the local part (before the '@') of the address
1533 uid = email.split('@')[0]
1534 return uid in alias_cache
1536 ################################################################################
1538 def get_changes_files(dir):
1540 Takes a directory and lists all .changes files in it (as well as chdir'ing
1541 to the directory; this is due to broken behaviour on the part of p-u/p-a
1542 when you're not in the right place)
1544 Returns a list of filenames
1547 # Much of the rest of p-u/p-a depends on being in the right place
# NOTE(review): the os.chdir() call and the surrounding try/except are
# missing from this listing; line 1551 below is the exception handler's
# body (fubar() aborts with the OS error message)
1549 changes_files = [x for x in os.listdir(dir) if x.endswith('.changes')]
1551 fubar("Failed to read list from directory %s (%s)" % (dir, e))
1553 return changes_files
1555 ################################################################################
# Module initialisation: load the dak configuration into the global 'Cnf'
# used throughout this file.  (An apt_pkg.init() call appears to be missing
# from this listing — TODO confirm against the full source.)
1559 Cnf = apt_pkg.newConfiguration()
1560 apt_pkg.ReadConfigFileISC(Cnf,default_config)
# overlaying a host-specific config on top of the default one is disabled
1562 #if which_conf_file() != default_config:
1563 # apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1565 ###############################################################################