2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
42 import email as modemail
43 from dak_exceptions import *
44 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
45 re_multi_line_field, re_srchasver, re_verwithext, \
46 re_parse_maintainer, re_taint_free, re_gpg_uid, re_re_mark
48 ################################################################################
50 default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
51 default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used
53 alias_cache = None #: Cache for email alias checks
54 key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
56 # (hashname, function, earliest_changes_version)
57 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
58 ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
60 ################################################################################
# NOTE(review): fragment of html_escape(s) -- its "def" line is on an elided
# line above this view.  Substitutes HTML-special characters in s using the
# html_escaping map, keyed by matches of re_html_escaping (both imported from
# regexes at the top of the file).
63 """ Escape html chars """
64 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
66 ################################################################################
68 def open_file(filename, mode='r'):
# Thin wrapper around the builtin open(): per the @raise line below, an
# IOError is re-raised as the project's CantOpenError (Python 2 raise
# syntax).  NOTE(review): the docstring delimiters, the try/except wrapper
# and the "return f" are on elided lines.
70 Open C{file}, return fileobject.
72 @type filename: string
73 @param filename: path/filename to open
76 @param mode: open mode
79 @return: open fileobject
81 @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
85 f = open(filename, mode)
87 raise CantOpenError, filename
90 ################################################################################
92 def our_raw_input(prompt=""):
# Interactive prompt helper: writes `prompt' to stdout, then reads a line
# of user input (the read call and its exception handling are on elided
# lines -- presumably raw_input() with an EOFError handler, TODO confirm).
# On EOF the message below is printed to stderr.
94 sys.stdout.write(prompt)
100 sys.stderr.write("\nUser interrupt (^D).\n")
103 ################################################################################
105 def extract_component_from_section(section):
# Split a Debian "section" value of the form "component/section" into the
# component prefix; for plain sections the component is derived via the
# Cnf lookup below (default-component expansion happens on elided lines).
# Returns the (section, component) pair.
108 if section.find('/') != -1:
109 component = section.split('/')[0]
111 # Expand default component
113 if Cnf.has_key("Component::%s" % section):
118 return (section, component)
120 ################################################################################
122 def parse_deb822(contents, signing_rules=0):
# Parse an RFC822-style (deb822) blob into a field -> value dict `changes'.
# signing_rules follows the parse_changes() contract: -1 ignores inline PGP
# armor, 0 tolerates it, 1 enforces the strict signed-message layout.
# Raises ParseChangesError / InvalidDscError (Python 2 raise syntax) on
# malformed input.  NOTE(review): this view is heavily elided -- the
# `index'/`inside_signature'/`first'/`error' bookkeeping lives on lines not
# shown; do not infer the exact control flow from here alone.
126 # Split the lines in the input, keeping the linebreaks.
127 lines = contents.splitlines(True)
130 raise ParseChangesError, "[Empty changes file]"
132 # Reindex by line number so we can easily verify the format of
138 indexed_lines[index] = line[:-1]
142 num_of_lines = len(indexed_lines.keys())
145 while index < num_of_lines:
147 line = indexed_lines[index]
149 if signing_rules == 1:
151 if index > num_of_lines:
152 raise InvalidDscError, index
153 line = indexed_lines[index]
# Strict mode requires the signature armor to start immediately after
# the data section's terminating blank line.
154 if not line.startswith("-----BEGIN PGP SIGNATURE"):
155 raise InvalidDscError, index
160 if line.startswith("-----BEGIN PGP SIGNATURE"):
162 if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
164 if signing_rules == 1:
# Strict mode: skip the PGP header block up to its blank-line end.
165 while index < num_of_lines and line != "":
167 line = indexed_lines[index]
169 # If we're not inside the signed data, don't process anything
170 if signing_rules >= 0 and not inside_signature:
172 slf = re_single_line_field.match(line)
174 field = slf.groups()[0].lower()
175 changes[field] = slf.groups()[1]
179 changes[field] += '\n'
181 mlf = re_multi_line_field.match(line)
# A continuation line must follow an already-started field.
184 raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
185 if first == 1 and changes[field] != "":
186 changes[field] += '\n'
188 changes[field] += mlf.groups()[0] + '\n'
# Reaching the end while still inside the signature means the armor
# was never closed.
192 if signing_rules == 1 and inside_signature:
193 raise InvalidDscError, index
# Preserve the raw input for later re-parsing (see ensure_hashes()).
195 changes["filecontents"] = "".join(lines)
197 if changes.has_key("source"):
198 # Strip the source version in brackets from the source field,
199 # put it in the "source-version" field instead.
200 srcver = re_srchasver.search(changes["source"])
202 changes["source"] = srcver.group(1)
203 changes["source-version"] = srcver.group(2)
206 raise ParseChangesError, error
210 ################################################################################
212 def parse_changes(filename, signing_rules=0):
214 Parses a changes file and returns a dictionary where each field is a
215 key. The mandatory first argument is the filename of the .changes
218 signing_rules is an optional argument:
220 - If signing_rules == -1, no signature is required.
221 - If signing_rules == 0 (the default), a signature is required.
222 - If signing_rules == 1, it turns on the same strict format checking
225 The rules for (signing_rules == 1)-mode are:
227 - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
228 followed by any PGP header data and must end with a blank line.
230 - The data section must end with a blank line and must be followed by
231 "-----BEGIN PGP SIGNATURE-----".
# Reads the whole file and delegates parsing to parse_deb822().
# NOTE(review): the close() of changes_in, if any, sits on an elided line
# (236) -- confirm the handle is not leaked.
234 changes_in = open_file(filename)
235 content = changes_in.read()
237 return parse_deb822(content, signing_rules)
239 ################################################################################
def hash_key(hashname):
    """Map a hash name to the key used for it in a per-file dict.

    For example "md5" -> "md5sum", "sha256" -> "sha256sum".
    """
    return '%ssum' % hashname
244 ################################################################################
246 def create_hash(where, files, hashname, hashfunc):
248 create_hash extends the passed files dict with the given hash by
249 iterating over all files on disk and passing them to the hashing
# Mutates `files' in place: files[f][hash_key(hashname)] receives
# hashfunc(open file object) -- the apt_pkg.sha*sum-style API (see the
# known_hashes table at the top of the file).  Unopenable files are
# recorded in rejmsg instead.  NOTE(review): the try:, the rejmsg
# initialisation and the return are on elided lines.
254 for f in files.keys():
256 file_handle = open_file(f)
257 except CantOpenError:
258 rejmsg.append("Could not open file %s for checksumming" % (f))
260 files[f][hash_key(hashname)] = hashfunc(file_handle)
265 ################################################################################
267 def check_hash(where, files, hashname, hashfunc):
269 check_hash checks the given hash in the files dict against the actual
270 files on disk. The hash values need to be present consistently in
271 all file entries. It does not modify its input in any way.
# Accumulates human-readable problems in rejmsg (init/return on elided
# lines); `where' names the manifest being checked (".changes"/".dsc")
# for the messages.
275 for f in files.keys():
279 file_handle = open_file(f)
281 # Check for the hash entry, to not trigger a KeyError.
282 if not files[f].has_key(hash_key(hashname)):
283 rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
287 # Actually check the hash for correctness.
288 if hashfunc(file_handle) != files[f][hash_key(hashname)]:
289 rejmsg.append("%s: %s check failed in %s" % (f, hashname,
291 except CantOpenError:
292 # TODO: This happens when the file is in the pool.
293 # warn("Cannot open file %s" % f)
300 ################################################################################
302 def check_size(where, files):
304 check_size checks the file sizes in the passed files dict against the
# Compares each entry's declared "size" field with the on-disk size.
# NOTE(review): `entry' comes from an os.stat()-style call on an elided
# line (it is indexed with stat.ST_SIZE below) -- confirm; rejmsg
# init/return are likewise elided.
309 for f in files.keys():
314 # TODO: This happens when the file is in the pool.
318 actual_size = entry[stat.ST_SIZE]
319 size = int(files[f]["size"])
320 if size != actual_size:
321 rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
322 % (f, actual_size, size, where))
325 ################################################################################
327 def check_hash_fields(what, manifest):
329 check_hash_fields ensures that there are no checksum fields in the
330 given dict that we do not know about.
# Compares every "checksums-*" field name in `manifest' against the hash
# names in the module-level known_hashes table; unknown ones are reported
# via rejmsg (init/return on elided lines).
334 hashes = map(lambda x: x[0], known_hashes)
335 for field in manifest:
336 if field.startswith("checksums-"):
337 hashname = field.split("-",1)[1]
338 if hashname not in hashes:
339 rejmsg.append("Unsupported checksum field for %s "\
340 "in %s" % (hashname, what))
343 ################################################################################
345 def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
# Ensure one hash type is available for every file in the .changes:
# if the changes Format (`format', a version tuple) is new enough to
# carry the hash, import it via parse_checksums(); otherwise compute it.
# NOTE(review): `func' is bound on an elided line -- from the visible
# branches it selects between importing and calculating, confirm which
# callable it actually names.
346 if format >= version:
347 # The version should contain the specified hash.
350 # Import hashes from the changes
351 rejmsg = parse_checksums(".changes", files, changes, hashname)
355 # We need to calculate the hash because it can't possibly
358 return func(".changes", files, hashname, hashfunc)
360 # We could add the orig which might be in the pool to the files dict to
361 # access the checksums easily.
363 def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
365 ensure_dsc_hashes' task is to ensure that each and every *present* hash
366 in the dsc is correct, i.e. identical to the changes file and if necessary
367 the pool. The latter task is delegated to check_hash.
# Absent Checksums-<hash> fields are not an error (early exit on the
# elided line after the has_key test); rejmsg init/return are elided.
371 if not dsc.has_key('Checksums-%s' % (hashname,)):
373 # Import hashes from the dsc
374 parse_checksums(".dsc", dsc_files, dsc, hashname)
376 rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
379 ################################################################################
381 def ensure_hashes(changes, dsc, files, dsc_files):
# Top-level checksum validation for an upload: normalise the changes
# Format into a (major, minor) int tuple, re-import checksum fields from
# the raw changes blob, reject unknown hash types, then verify/compute
# every hash in known_hashes for both .changes and (for source uploads)
# the .dsc.  rejmsg init/return are on elided lines.  NOTE: the local
# `format' shadows the py2.6+ builtin of the same name.
384 # Make sure we recognise the format of the Files: field in the .changes
385 format = changes.get("format", "0.0").split(".", 1)
387 format = int(format[0]), int(format[1])
389 format = int(float(format[0])), 0
391 # We need to deal with the original changes blob, as the fields we need
392 # might not be in the changes dict serialised into the .dak anymore.
393 orig_changes = parse_deb822(changes['filecontents'])
395 # Copy the checksums over to the current changes dict. This will keep
396 # the existing modifications to it intact.
397 for field in orig_changes:
398 if field.startswith('checksums-'):
399 changes[field] = orig_changes[field]
401 # Check for unsupported hashes
402 rejmsg.extend(check_hash_fields(".changes", changes))
403 rejmsg.extend(check_hash_fields(".dsc", dsc))
405 # We have to calculate the hash if we have an earlier changes version than
406 # the hash appears in rather than require it exist in the changes file
407 for hashname, hashfunc, version in known_hashes:
408 rejmsg.extend(_ensure_changes_hash(changes, format, version, files,
410 if "source" in changes["architecture"]:
411 rejmsg.extend(_ensure_dsc_hash(dsc, dsc_files, hashname,
416 def parse_checksums(where, files, manifest, hashname):
# Import the "checksums-<hashname>" field of `manifest' (one
# "<checksum> <size> <filename>" triple per line) into the per-file
# dicts in `files', cross-checking the declared sizes, then verify every
# file got an entry.  rejmsg init/return and the blank-line skip are on
# elided lines.
418 field = 'checksums-%s' % hashname
419 if not field in manifest:
421 for line in manifest[field].split('\n'):
424 checksum, size, checkfile = line.strip().split(' ')
425 if not files.has_key(checkfile):
426 # TODO: check for the file's entry in the original files dict, not
427 # the one modified by (auto)byhand and other weird stuff
428 # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
429 # (file, hashname, where))
# NOTE(review): this is a string comparison -- it only works because
# both sides stay strings; an int "size" entry would always mismatch.
431 if not files[checkfile]["size"] == size:
432 rejmsg.append("%s: size differs for files and checksums-%s entry "\
433 "in %s" % (checkfile, hashname, where))
435 files[checkfile][hash_key(hashname)] = checksum
436 for f in files.keys():
437 if not files[f].has_key(hash_key(hashname)):
# FIXME(review): this reports `checkfile' -- the stale loop variable
# from the parsing loop above -- instead of `f', so the message names
# the wrong file.
438 rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
442 ################################################################################
444 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
446 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
449 # Make sure we have a Files: field to parse...
450 if not changes.has_key(field):
451 raise NoFilesFieldError
453 # Make sure we recognise the format of the Files: field
454 format = re_verwithext.search(changes.get("format", "0.0"))
456 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
458 format = format.groups()
459 if format[1] == None:
460 format = int(float(format[0])), 0, format[2]
462 format = int(format[0]), int(format[1]), format[2]
463 if format[2] == None:
467 # format = (1,0) are the only formats we currently accept,
468 # format = (0,0) are missing format headers of which we still
469 # have some in the archive.
470 if format != (1,0) and format != (0,0):
471 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
473 if (format < (1,5) or format > (1,8)):
474 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
475 if field != "files" and format < (1,8):
476 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
478 includes_section = (not is_a_dsc) and field == "files"
480 # Parse each entry/line:
481 for i in changes[field].split('\n'):
485 section = priority = ""
488 (md5, size, section, priority, name) = s
490 (md5, size, name) = s
492 raise ParseChangesError, i
499 (section, component) = extract_component_from_section(section)
501 files[name] = Dict(size=size, section=section,
502 priority=priority, component=component)
503 files[name][hashname] = md5
507 ################################################################################
509 def force_to_utf8(s):
511 Forces a string to UTF-8. If the string isn't already UTF-8,
512 it's assumed to be ISO-8859-1.
# Python 2 only (unicode()/str distinction).  NOTE(review): the "already
# UTF-8" probe (presumably a try: unicode(s, 'utf-8') with an early
# return) sits on elided lines; only the latin-1 fallback is visible.
518 latin1_s = unicode(s,'iso8859-1')
519 return latin1_s.encode('utf-8')
521 def rfc2047_encode(s):
523 Encodes a (header) string per RFC2047 if necessary. If the
524 string is neither ASCII nor UTF-8, it's assumed to be ISO-8859-1.
# Probes the encoding by running the codec tuple's element [1] (the
# decode function) over s; the UnicodeError handlers and the plain-ASCII
# early return are on elided lines.  NOTE(review): `codecs' and a plain
# `email' are not among the visible imports (only "import email as
# modemail" is) -- confirm `email.Header' resolves at module level.
527 codecs.lookup('ascii')[1](s)
532 codecs.lookup('utf-8')[1](s)
533 h = email.Header.Header(s, 'utf-8', 998)
536 h = email.Header.Header(s, 'iso-8859-1', 998)
539 ################################################################################
541 # <Culus> 'The standard sucks, but my tool is supposed to interoperate
542 # with it. I know - I'll fix the suckage and make things
545 def fix_maintainer (maintainer):
547 Parses a Maintainer or Changed-By field and returns:
548 1. an RFC822 compatible version,
549 2. an RFC2047 compatible version,
553 The name is forced to UTF-8 for both 1. and 3.. If the name field
554 contains '.' or ',' (as allowed by Debian policy), 1. and 2. are
555 switched to 'email (name)' format.
# Raises ParseMaintError (Python 2 raise syntax) on unparseable input or
# a missing '@' (buildd_* pseudo-addresses are exempt).  NOTE: the local
# `email' shadows any module of that name within this function.
558 maintainer = maintainer.strip()
560 return ('', '', '', '')
# Bare address without a display name.
562 if maintainer.find("<") == -1:
565 elif (maintainer[0] == "<" and maintainer[-1:] == ">"):
566 email = maintainer[1:-1]
# Full "Name <address>" form -- split via re_parse_maintainer.
569 m = re_parse_maintainer.match(maintainer)
571 raise ParseMaintError, "Doesn't parse as a valid Maintainer field."
575 # Get an RFC2047 compliant version of the name
576 rfc2047_name = rfc2047_encode(name)
578 # Force the name to be UTF-8
579 name = force_to_utf8(name)
# '.'/',' in a name would break the RFC822 "Name <addr>" form, so flip
# to "addr (Name)" per the docstring above.
581 if name.find(',') != -1 or name.find('.') != -1:
582 rfc822_maint = "%s (%s)" % (email, name)
583 rfc2047_maint = "%s (%s)" % (email, rfc2047_name)
585 rfc822_maint = "%s <%s>" % (name, email)
586 rfc2047_maint = "%s <%s>" % (rfc2047_name, email)
588 if email.find("@") == -1 and email.find("buildd_") != 0:
589 raise ParseMaintError, "No @ found in email address part."
591 return (rfc822_maint, rfc2047_maint, name, email)
593 ################################################################################
595 def send_mail (message, filename=""):
596 """sendmail wrapper, takes _either_ a message string or a file as arguments"""
# If Dinstall::MailWhiteList is configured, the message's recipient
# headers are filtered against the whitelist before piping the file to
# Dinstall::SendmailCommand; an empty recipient set aborts the send.
# Raises SendmailFailedError on a non-zero sendmail exit.  NOTE(review):
# heavily elided -- os.close() calls, the `match' accumulation and the
# final temp-file cleanup are on lines not shown.
598 # If we've been passed a string dump it into a temporary file
600 (fd, filename) = tempfile.mkstemp()
601 os.write (fd, message)
604 if Cnf.has_key("Dinstall::MailWhiteList") and \
605 Cnf["Dinstall::MailWhiteList"] != "":
606 message_in = open_file(filename)
607 message_raw = modemail.message_from_file(message_in)
# Whitelist entries starting with the re_re_mark prefix are treated as
# regexes; everything else matches literally.
611 whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
613 for line in whitelist_in:
614 if re_re_mark.match(line):
615 whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
617 whitelist.append(re.compile(re.escape(line.strip())))
622 fields = ["To", "Bcc", "Cc"]
625 value = message_raw.get(field, None)
628 for item in value.split(","):
629 (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
635 if not mail_whitelisted:
636 print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])
640 # Doesn't have any mail in whitelist so remove the header
642 del message_raw[field]
# NOTE(review): string.join requires "import string", which is not
# among the visible imports -- confirm it exists in the full file.
644 message_raw.replace_header(field, string.join(match, ", "))
646 # Change message fields in order if we don't have a To header
647 if not message_raw.has_key("To"):
650 if message_raw.has_key(field):
651 message_raw[fields[-1]] = message_raw[field]
652 del message_raw[field]
655 # Clean up any temporary files
656 # and return, as we removed all recipients.
658 os.unlink (filename);
# NOTE(review): O_EXCL without O_CREAT on an existing file has no
# defined exclusive-create meaning, and the file is rewritten in place
# without truncation -- confirm intent.
661 fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
662 os.write (fd, message_raw.as_string(True));
# The message file is handed to sendmail via a shell redirect; filename
# is either our own mkstemp path or the caller-supplied one.
666 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
668 raise SendmailFailedError, output
670 # Clean up any temporary files
674 ################################################################################
676 def poolify (source, component):
# Build the pool directory fragment for a source package: "lib*" packages
# are bucketed under their first four letters ("libf/"), everything else
# under the first letter.  NOTE(review): the handling of `component'
# (presumably appending '/' when non-empty) and the else: keyword are on
# elided lines.
679 if source[:3] == "lib":
680 return component + source[:4] + '/' + source + '/'
682 return component + source[:1] + '/' + source + '/'
684 ################################################################################
686 def move (src, dest, overwrite = 0, perms = 0664):
# Move src to dest (copy2 + chmod; the removal of src is on an elided
# line -- confirm), creating missing destination directories group-
# writable (02775).  Without `overwrite', an existing destination is a
# fatal error via fubar().  NOTE(review): near-duplicate of copy() below
# except for the error style -- candidate for a shared helper.
687 if os.path.exists(dest) and os.path.isdir(dest):
690 dest_dir = os.path.dirname(dest)
691 if not os.path.exists(dest_dir):
# Drop the umask so the explicit 02775 mode takes effect (restored on
# an elided line -- confirm).
692 umask = os.umask(00000)
693 os.makedirs(dest_dir, 02775)
695 #print "Moving %s to %s..." % (src, dest)
# Moving *into* a directory: append the basename.
696 if os.path.exists(dest) and os.path.isdir(dest):
697 dest += '/' + os.path.basename(src)
698 # Don't overwrite unless forced to
699 if os.path.exists(dest):
701 fubar("Can't move %s to %s - file already exists." % (src, dest))
703 if not os.access(dest, os.W_OK):
704 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
705 shutil.copy2(src, dest)
706 os.chmod(dest, perms)
709 def copy (src, dest, overwrite = 0, perms = 0664):
# Copy src to dest with metadata (copy2) and set `perms', creating
# missing destination directories group-writable (02775).  Unlike move()
# above, conflicts raise FileExistsError / CantOverwriteError instead of
# calling fubar().
710 if os.path.exists(dest) and os.path.isdir(dest):
713 dest_dir = os.path.dirname(dest)
714 if not os.path.exists(dest_dir):
# Drop the umask so the explicit 02775 mode takes effect (restored on
# an elided line -- confirm).
715 umask = os.umask(00000)
716 os.makedirs(dest_dir, 02775)
718 #print "Copying %s to %s..." % (src, dest)
# Copying *into* a directory: append the basename.
719 if os.path.exists(dest) and os.path.isdir(dest):
720 dest += '/' + os.path.basename(src)
721 # Don't overwrite unless forced to
722 if os.path.exists(dest):
724 raise FileExistsError
726 if not os.access(dest, os.W_OK):
727 raise CantOverwriteError
728 shutil.copy2(src, dest)
729 os.chmod(dest, perms)
731 ################################################################################
# NOTE(review): the lines below belong to a host-database helper whose
# "def" line is elided; like its siblings it keys per-host configuration
# off the canonical hostname.  The fallback return is also elided.
734 res = socket.gethostbyaddr(socket.gethostname())
735 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
736 if database_hostname:
737 return database_hostname
# Per-host dak config lookup, falling back to the module default.
741 def which_conf_file ():
742 res = socket.gethostbyaddr(socket.gethostname())
743 if Cnf.get("Config::" + res[0] + "::DakConfig"):
744 return Cnf["Config::" + res[0] + "::DakConfig"]
746 return default_config
# Per-host apt config lookup, falling back to the module default.
748 def which_apt_conf_file ():
749 res = socket.gethostbyaddr(socket.gethostname())
750 if Cnf.get("Config::" + res[0] + "::AptConfig"):
751 return Cnf["Config::" + res[0] + "::AptConfig"]
753 return default_apt_config
# Locate the per-host forward-alias file, if present (the return
# statements are on elided lines).
755 def which_alias_file():
756 hostname = socket.gethostbyaddr(socket.gethostname())[0]
757 aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
758 if os.path.exists(aliasfn):
763 ################################################################################
763 ################################################################################
765 # Escape characters which have meaning to SQL's regex comparison operator ('~')
766 # (woefully incomplete)
# NOTE(review): fragment of the SQL-regex escaping helper described by the
# comment above (def line elided).  Doubled backslashes survive both the
# SQL string layer and the regex layer of the '~' operator.
769 s = s.replace('+', '\\\\+')
770 s = s.replace('.', '\\\\.')
773 ################################################################################
775 def TemplateSubst(map, filename):
776 """ Perform a substition of template """
# Read the template file and replace each key of `map' occurring in it
# with the corresponding value (the loop header over map and the return
# are on elided lines).  NOTE: the parameter `map' shadows the builtin.
777 templatefile = open_file(filename)
778 template = templatefile.read()
780 template = template.replace(x,map[x])
784 ################################################################################
# Fatal-error helper: print "E: msg" to stderr; the sys.exit(exit_code)
# call is on an elided line.
786 def fubar(msg, exit_code=1):
787 sys.stderr.write("E: %s\n" % (msg))
# NOTE(review): this "W:" write belongs to warn(msg), whose def line is
# elided.
791 sys.stderr.write("W: %s\n" % (msg))
793 ################################################################################
795 # Returns the user name with a laughable attempt at rfc822 conformancy
796 # (read: removing stray periods).
# NOTE(review): body of whoami() (def elided): GECOS full name from the
# password database, periods stripped.
798 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
800 ################################################################################
# NOTE(review): return of a human-readable size formatter (def and the
# computation of `c'/`t' are elided).
810 return ("%d%s" % (c, t))
812 ################################################################################
814 def cc_fix_changes (changes):
# Normalise the "architecture" field in place: the space-separated
# string is replaced by a dict mapping each architecture name to 1 (the
# split over `o' is on an elided line), so membership tests become
# dict lookups (see changes_compare below).
815 o = changes.get("architecture", "")
817 del changes["architecture"]
818 changes["architecture"] = {}
820 changes["architecture"][j] = 1
822 def changes_compare (a, b):
823 """ Sort by source name, source version, 'have source', and then by filename """
# Python 2 cmp()-style comparator over two .changes *filenames*; each is
# parsed (exception fallbacks for unparseable files are on elided
# lines).  Each comparison stage returns early when decisive (those
# "if q: return q" lines are elided).
825 a_changes = parse_changes(a)
830 b_changes = parse_changes(b)
834 cc_fix_changes (a_changes)
835 cc_fix_changes (b_changes)
837 # Sort by source name
838 a_source = a_changes.get("source")
839 b_source = b_changes.get("source")
840 q = cmp (a_source, b_source)
844 # Sort by source version
845 a_version = a_changes.get("version", "0")
846 b_version = b_changes.get("version", "0")
847 q = apt_pkg.VersionCompare(a_version, b_version)
851 # Sort by 'have source'
852 a_has_source = a_changes["architecture"].get("source")
853 b_has_source = b_changes["architecture"].get("source")
854 if a_has_source and not b_has_source:
856 elif b_has_source and not a_has_source:
859 # Fall back to sort by filename
862 ################################################################################
864 def find_next_free (dest, too_many=100):
# Find a non-existing filename by appending ".1", ".2", ... to `dest',
# giving up with NoFreeFilenameError after `too_many' attempts.  The
# `extra'/`orig_dest' initialisation, the counter increment and the
# final return are on elided lines.
867 while os.path.exists(dest) and extra < too_many:
868 dest = orig_dest + '.' + repr(extra)
870 if extra >= too_many:
871 raise NoFreeFilenameError
874 ################################################################################
def result_join (original, sep = '\t'):
    """Join the values of `original' with `sep', rendering None as "".

    Used to format query-result rows where NULL columns come back as
    None but should print as empty fields.

    @param original: sequence of strings and/or None values
    @param sep: separator string (defaults to a tab)
    @return: the joined string
    """
    # `x is None' (identity) instead of the original `== None', and a
    # comprehension instead of the manual append loop.
    return sep.join([x if x is not None else "" for x in original])
885 ################################################################################
887 def prefix_multi_line_string(str, prefix, include_blank_lines=0):
# Prepend `prefix' to every line of `str', skipping blank lines unless
# include_blank_lines is set; the trailing newline is stripped before
# returning (accumulator init, the strip and the return are on elided
# lines).  NOTE: the parameter `str' shadows the builtin.
889 for line in str.split('\n'):
891 if line or include_blank_lines:
892 out += "%s%s\n" % (prefix, line)
893 # Strip trailing new line
900 def validate_changes_file_arg(filename, require_changes=1):
902 'filename' is either a .changes or .dak file. If 'filename' is a
903 .dak file, it's changed to be the corresponding .changes file. The
904 function then checks if the .changes file a) exists and b) is
905 readable and returns the .changes filename if so. If there's a
906 problem, the next action depends on the option 'require_changes'
909 - If 'require_changes' == -1, errors are ignored and the .changes
910 filename is returned.
911 - If 'require_changes' == 0, a warning is given and 'None' is returned.
912 - If 'require_changes' == 1, a fatal error is raised.
# The error branches and several returns are on elided lines; `error'
# is only consulted when set by one of the checks below.
917 orig_filename = filename
918 if filename.endswith(".dak"):
919 filename = filename[:-4]+".changes"
921 if not filename.endswith(".changes"):
922 error = "invalid file type; not a changes file"
924 if not os.access(filename,os.R_OK):
925 if os.path.exists(filename):
926 error = "permission denied"
928 error = "file not found"
931 if require_changes == 1:
# fubar() exits the process, per its definition earlier in the file.
932 fubar("%s: %s." % (orig_filename, error))
933 elif require_changes == 0:
934 warn("Skipping %s - %s" % (orig_filename, error))
936 else: # We only care about the .dak file
# NOTE(review): body of a binary-architecture predicate (def line elided):
# true for any arch other than the pseudo-architectures "source" and "all".
944 return (arch != "source" and arch != "all")
946 ################################################################################
def join_with_commas_and(list):
    """Render a sequence as an English enumeration, e.g. 'a, b and c'.

    An empty sequence yields "nothing"; a single element is returned
    as-is; otherwise all but the last element are comma-joined and the
    final one is appended with " and ".
    """
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    head = ", ".join(list[:-1])
    return head + " and " + list[-1]
953 ################################################################################
# NOTE(review): body of a dependency pretty-printer (def line and the loop
# header over the atoms are elided): each (pkg, version, constraint) atom
# is rendered as "pkg (constraint version)" and alternatives are joined
# with " |".
958 (pkg, version, constraint) = atom
960 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
963 pp_deps.append(pp_dep)
964 return " |".join(pp_deps)
966 ################################################################################
971 ################################################################################
973 def parse_args(Options):
974 """ Handle -a, -c and -s arguments; returns them as SQL constraints """
# Resolve the comma/space-separated Suite, Component and Architecture
# option values to database ids and build "AND <col> IN (...)" SQL
# fragments from them (only integer ids are interpolated, not the raw
# user strings).  Unknown names warn; an entirely-invalid list is fatal
# via fubar().  Several initialisations ("Process suite", con_components
# default, arch_ids_list/check_source) are on elided lines.
978 for suite in split_args(Options["Suite"]):
979 suite_id = database.get_suite_id(suite)
981 warn("suite '%s' not recognised." % (suite))
983 suite_ids_list.append(suite_id)
985 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
987 fubar("No valid suite given.")
992 if Options["Component"]:
993 component_ids_list = []
994 for component in split_args(Options["Component"]):
995 component_id = database.get_component_id(component)
996 if component_id == -1:
997 warn("component '%s' not recognised." % (component))
999 component_ids_list.append(component_id)
1000 if component_ids_list:
1001 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
1003 fubar("No valid component given.")
1007 # Process architecture
1008 con_architectures = ""
1009 if Options["Architecture"]:
1012 for architecture in split_args(Options["Architecture"]):
# "source" is not a real architecture id; it flips check_source
# (assignment on an elided line) instead.
1013 if architecture == "source":
1016 architecture_id = database.get_architecture_id(architecture)
1017 if architecture_id == -1:
1018 warn("architecture '%s' not recognised." % (architecture))
1020 arch_ids_list.append(architecture_id)
1022 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
1024 if not check_source:
1025 fubar("No valid architecture given.")
1029 return (con_suites, con_architectures, con_components, check_source)
1031 ################################################################################
1033 # Inspired(tm) by Bryn Keller's print_exc_plus (See
1034 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
# NOTE(review): body of an extended-traceback dumper (def line elided):
# after the normal traceback it prints each stack frame's local
# variables; reprs that raise are caught and shown as "<unable to
# print>" (the frame-walking loop and try/except are partly elided).
1037 tb = sys.exc_info()[2]
1044 frame = frame.f_back
1046 traceback.print_exc()
1048 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
1049 frame.f_code.co_filename,
1051 for key, value in frame.f_locals.items():
1052 print "\t%20s = " % key,
1056 print "<unable to print>"
1058 ################################################################################
# NOTE(review): the entire body of try_with_debug is elided from this
# view; its behaviour cannot be documented from here.
1060 def try_with_debug(function):
1068 ################################################################################
1070 def arch_compare_sw (a, b):
1072 Function for use in sorting lists of architectures.
1074 Sorts normally except that 'source' dominates all others.
# Python 2 cmp()-style comparator; the branches for exactly one side
# being "source" and the cmp() fallback are on elided lines.
1077 if a == "source" and b == "source":
1086 ################################################################################
1088 def split_args (s, dwim=1):
1090 Split command line arguments which can be separated by either commas
1091 or whitespace. If dwim is set, it will complain about string ending
1092 in comma since this usually means someone did 'dak ls -a i386, m68k
1093 foo' or something and the inevitable confusion resulting from 'm68k'
1094 being treated as an argument is undesirable.
# Whitespace-separated path and the actual split(",") are on elided
# lines; the trailing-comma guard is fatal via fubar() when dwim is set.
1097 if s.find(",") == -1:
1100 if s[-1:] == "," and dwim:
1101 fubar("split_args: found trailing comma, spurious space maybe?")
1104 ################################################################################
def Dict(**kwargs):
    """Return the keyword arguments as a plain dictionary.

    Convenience constructor used throughout the file, e.g.
    Dict(size=..., section=...).  The kwargs variable is named `kwargs'
    rather than `dict' so the builtin is not shadowed; the call
    interface is unchanged (keyword arguments only).
    """
    return kwargs
1108 ########################################
1110 def gpgv_get_status_output(cmd, status_read, status_write):
1112 Our very own version of commands.getouputstatus(), hacked to support
# Run `cmd' through /bin/sh -c in a forked child (the fork and the
# child/parent branch are on elided lines), keeping the caller-supplied
# gpgv --status-fd pipe open in the child, and collect stdout+stderr
# into `output' and the status pipe into `status'.  Callers must
# sanitize cmd (see re_taint_free in retrieve_key/check_signature)
# because it passes through a shell.
1116 cmd = ['/bin/sh', '-c', cmd]
1117 p2cread, p2cwrite = os.pipe()
1118 c2pread, c2pwrite = os.pipe()
1119 errout, errin = os.pipe()
# Child: close every fd except the status pipe before exec'ing.
1129 for i in range(3, 256):
1130 if i != status_write:
1136 os.execvp(cmd[0], cmd)
# Parent: redirect the child-facing ends so reads below see its output.
1142 os.dup2(c2pread, c2pwrite)
1143 os.dup2(errout, errin)
1145 output = status = ""
1147 i, o, e = select.select([c2pwrite, errin, status_read], [], [])
# NOTE(review): 8196 looks like a typo for 8192; harmless, just an odd
# read size.  The fd loop header and the accumulation of r into
# output/status are on elided lines.
1150 r = os.read(fd, 8196)
1152 more_data.append(fd)
1153 if fd == c2pwrite or fd == errin:
1155 elif fd == status_read:
1158 fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
1160 pid, exit_status = os.waitpid(pid, 0)
1162 os.close(status_write)
1163 os.close(status_read)
1173 return output, status, exit_status
1175 ################################################################################
1177 def process_gpgv_output(status):
1178 # Process the status-fd output
# Parse gpgv's --status-fd stream into a keyword -> args dict plus an
# accumulated internal_error string ("" when the stream was well
# formed).  Each valid line has the shape "[GNUPG:] KEYWORD args...";
# the keywords/internal_error init, blank-line skip, `args' slice and
# `continue's are on elided lines.
1181 for line in status.split('\n'):
1185 split = line.split()
1187 internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
1189 (gnupg, keyword) = split[:2]
1190 if gnupg != "[GNUPG:]":
1191 internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
# gpg legitimately repeats these three tokens, so only other
# duplicates are treated as errors.
1194 if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
1195 internal_error += "found duplicate status token ('%s').\n" % (keyword)
1198 keywords[keyword] = args
1200 return (keywords, internal_error)
1202 ################################################################################
1204 def retrieve_key (filename, keyserver=None, keyring=None):
1206 Retrieve the key that signed 'filename' from 'keyserver' and
1207 add it to 'keyring'. Returns nothing on success, or an error message
# Strategy: run gpgv against an empty keyring so it reports NO_PUBKEY
# with the needed fingerprint, then gpg --recv-key it from the
# configured keyserver.  The success return and the non-zero-result
# check before the final error string are on elided lines.
1211 # Defaults for keyserver and keyring
1213 keyserver = Cnf["Dinstall::KeyServer"]
1215 keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
1217 # Ensure the filename contains no shell meta-characters or other badness
# Mandatory: cmd below goes through /bin/sh (gpgv_get_status_output)
# and commands.getstatusoutput.
1218 if not re_taint_free.match(filename):
1219 return "%s: tainted filename" % (filename)
1221 # Invoke gpgv on the file
1222 status_read, status_write = os.pipe()
1223 cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1224 (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1226 # Process the status-fd output
1227 (keywords, internal_error) = process_gpgv_output(status)
1229 return internal_error
1231 if not keywords.has_key("NO_PUBKEY"):
1232 return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1234 fingerprint = keywords["NO_PUBKEY"][0]
1235 # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
1236 # it'll try to create a lockfile in /dev. A better solution might
1237 # be a tempfile or something.
1238 cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1239 % (Cnf["Dinstall::SigningKeyring"])
1240 cmd += " --keyring %s --keyserver %s --recv-key %s" \
1241 % (keyring, keyserver, fingerprint)
1242 (result, output) = commands.getstatusoutput(cmd)
1244 return "'%s' failed with exit code %s" % (cmd, result)
1248 ################################################################################
1250 def gpg_keyring_args(keyrings=None):
# Build the "--keyring A --keyring B ..." argument string for gpg/gpgv
# invocations; with no argument the configured Dinstall::GPGKeyring
# list is used (the "if not keyrings:" guard is on an elided line).
1252 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1254 return " ".join(["--keyring %s" % x for x in keyrings])
1256 ################################################################################
def check_signature (sig_filename, reject, data_filename="", keyrings=None, autofetch=None):
    """
    Check the signature of a file and return the fingerprint if the
    signature is valid or 'None' if it's not. The first argument is the
    filename whose signature should be checked. The second argument is a
    reject function and is called when an error is found. The reject()
    function must allow for two arguments: the first is the error message,
    the second is an optional prefix string. It's possible for reject()
    to be called more than once during an invocation of check_signature().
    The third argument is optional and is the name of the files the
    detached signature applies to. The fourth argument is optional and is
    a *list* of keyrings to use. 'autofetch' can either be None, True or
    False. If None, the default behaviour specified in the config will be
    """

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(sig_filename):
        reject("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))

    # The data filename is interpolated into a shell command below, so it
    # must pass the same taint check.
    if data_filename and not re_taint_free.match(data_filename):
        reject("!!WARNING!! tainted data filename: '%s'." % (data_filename))

    # No keyrings supplied by the caller: use the configured default set.
    # NOTE(review): the "if not keyrings:" guard appears elided in this
    # excerpt -- confirm against the full source.
    keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    # Autofetch the signing key if that's enabled
    if autofetch == None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    # NOTE(review): retrieve_key() is presumably only invoked when autofetch
    # is truthy, and error_msg rejected when non-empty -- the guards are not
    # visible in this excerpt.
    error_msg = retrieve_key(sig_filename)

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    reject("internal error while performing signature check on %s." % (sig_filename))
    reject(internal_error, "")
    reject("Please report the above errors to the Archive maintainers by replying to this mail.", "")

    # Now check for obviously bad things in the processed output
    if keywords.has_key("KEYREVOKED"):
        reject("The key used to sign %s has been revoked." % (sig_filename))
    if keywords.has_key("BADSIG"):
        reject("bad signature on %s." % (sig_filename))
    # ERRSIG without NO_PUBKEY: gpgv could not verify for some other reason
    if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
        reject("failed to check signature on %s." % (sig_filename))
    if keywords.has_key("NO_PUBKEY"):
        args = keywords["NO_PUBKEY"]
        reject("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if keywords.has_key("BADARMOR"):
        reject("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if keywords.has_key("NODATA"):
        reject("no signature found in %s." % (sig_filename))
    if keywords.has_key("EXPKEYSIG"):
        args = keywords["EXPKEYSIG"]
        reject("Signature made by expired key 0x%s" % (key))
    # KEYEXPIRED together with a GOODSIG means the key expired *after*
    # signing, which is acceptable; only reject without GOODSIG.
    if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
        args = keywords["KEYEXPIRED"]
        # gpgv reports either an epoch timestamp or an ISO8601 date
        # (which contains a "T"); normalise the epoch form to YYYY-MM-DD.
        if timestamp.count("T") == 0:
            expiredate = time.strftime("%Y-%m-%d", time.gmtime(timestamp))
        # NOTE(review): this looks like the "else" branch of the check
        # above; the "else:" line is not visible in this excerpt.
        expiredate = timestamp
        reject("The key used to sign %s has expired on %s" % (sig_filename, expiredate))

    # Next check gpgv exited with a zero return code
    reject("gpgv failed while checking %s." % (sig_filename))
    reject(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
    reject(prefix_multi_line_string(output, " [GPG output:] "), "")

    # Sanity check the good stuff we expect
    if not keywords.has_key("VALIDSIG"):
        reject("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
    args = keywords["VALIDSIG"]
    reject("internal error while checking signature on %s." % (sig_filename))
    # First VALIDSIG argument is the signing key's fingerprint.
    fingerprint = args[0]
    if not keywords.has_key("GOODSIG"):
        reject("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if not keywords.has_key("SIG_ID"):
        reject("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

    # Finally ensure there's not something we don't recognise
    known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
                          NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")
    for keyword in keywords.keys():
        if not known_keywords.has_key(keyword):
            reject("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
1399 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """Retrieve email addresses from gpg key uids for a given fingerprint."""
    # Serve repeated lookups for the same fingerprint from the module-level
    # cache; a hit short-circuits the (slow) gpg invocation below.
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses != None:
    # Ask gpg for the key's details using the configured keyrings.
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
          % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    # Scan the human-readable gpg output for uid lines; re_gpg_uid's first
    # group captures the mail address.
    for l in output.split('\n'):
        m = re_gpg_uid.match(l)
        addresses.add(m.group(1))
    # Remember the result for subsequent calls with this fingerprint.
    key_uid_email_cache[fingerprint] = addresses
1418 ################################################################################
1420 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
1422 def wrap(paragraph, max_length, prefix=""):
1426 words = paragraph.split()
1429 word_size = len(word)
1430 if word_size > max_length:
1432 s += line + '\n' + prefix
1433 s += word + '\n' + prefix
1436 new_length = len(line) + word_size + 1
1437 if new_length > max_length:
1438 s += line + '\n' + prefix
1451 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
    Returns the adjusted 'src'.
    """
    # Strip the leading root from both endpoints, then climb out of the
    # destination's directory with one "../" per path component.
    rel_src = src.replace(root, '', 1)
    dest_dir = os.path.dirname(dest.replace(root, '', 1))
    ups = '../' * len(dest_dir.split('/'))
    return ups + rel_src
1464 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.
    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pair (fd, name).
    """
    # tempfile.mkstemp pre-creates the file securely (O_EXCL); pass our
    # arguments by keyword to avoid its (suffix, prefix, dir) ordering trap.
    return tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=directory)
1478 ################################################################################
def is_email_alias(email):
    """ checks if the user part of the email is listed in the alias file """
    # Populate the module-level alias cache lazily on first use.
    if alias_cache == None:
        aliasfn = which_alias_file()
        # Alias file lines look like "name: target"; only the alias name
        # (the part before the first colon) is cached.
        for l in open(aliasfn):
            alias_cache.add(l.split(':')[0])
    # Only the local part (before '@') is compared against the aliases.
    uid = email.split('@')[0]
    return uid in alias_cache
1492 ################################################################################
# Module import side effect: load the host-wide dak configuration into the
# global Cnf object that the functions above consult.
Cnf = apt_pkg.newConfiguration()
apt_pkg.ReadConfigFileISC(Cnf,default_config)

# If a non-default config file is in effect (see which_conf_file()), layer
# its settings on top of the defaults read above.
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1502 ################################################################################