2 # vim:set et ts=4 sw=4:
5 # Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
7 ################################################################################
9 # This program is free software; you can redistribute it and/or modify
10 # it under the terms of the GNU General Public License as published by
11 # the Free Software Foundation; either version 2 of the License, or
12 # (at your option) any later version.
14 # This program is distributed in the hope that it will be useful,
15 # but WITHOUT ANY WARRANTY; without even the implied warranty of
16 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 # GNU General Public License for more details.
19 # You should have received a copy of the GNU General Public License
20 # along with this program; if not, write to the Free Software
21 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
23 ################################################################################
25 import codecs, commands, email.Header, os, pwd, re, select, socket, shutil, \
26 sys, tempfile, traceback, stat
30 from dak_exceptions import *
31 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
32 re_multi_line_field, re_srchasver, re_verwithext, \
33 re_parse_maintainer, re_taint_free, re_gpg_uid
35 ################################################################################
37 default_config = "/etc/dak/dak.conf"
38 default_apt_config = "/etc/dak/apt.conf"
41 key_uid_email_cache = {}
43 # (hashname, function, earliest_changes_version)
44 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
45 ("sha256", apt_pkg.sha256sum, (1, 8))]
47 ################################################################################
50 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
52 ################################################################################
54 def open_file(filename, mode='r'):
56 f = open(filename, mode)
58 raise CantOpenError, filename
61 ################################################################################
63 def our_raw_input(prompt=""):
65 sys.stdout.write(prompt)
71 sys.stderr.write("\nUser interrupt (^D).\n")
74 ################################################################################
76 def extract_component_from_section(section):
79 if section.find('/') != -1:
80 component = section.split('/')[0]
82 # Expand default component
84 if Cnf.has_key("Component::%s" % section):
89 return (section, component)
91 ################################################################################
93 def parse_deb822(contents, signing_rules=0):
97 # Split the lines in the input, keeping the linebreaks.
98 lines = contents.splitlines(True)
101 raise ParseChangesError, "[Empty changes file]"
103 # Reindex by line number so we can easily verify the format of
109 indexed_lines[index] = line[:-1]
113 num_of_lines = len(indexed_lines.keys())
116 while index < num_of_lines:
118 line = indexed_lines[index]
120 if signing_rules == 1:
122 if index > num_of_lines:
123 raise InvalidDscError, index
124 line = indexed_lines[index]
125 if not line.startswith("-----BEGIN PGP SIGNATURE"):
126 raise InvalidDscError, index
131 if line.startswith("-----BEGIN PGP SIGNATURE"):
133 if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
135 if signing_rules == 1:
136 while index < num_of_lines and line != "":
138 line = indexed_lines[index]
140 # If we're not inside the signed data, don't process anything
141 if signing_rules >= 0 and not inside_signature:
143 slf = re_single_line_field.match(line)
145 field = slf.groups()[0].lower()
146 changes[field] = slf.groups()[1]
150 changes[field] += '\n'
152 mlf = re_multi_line_field.match(line)
155 raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
156 if first == 1 and changes[field] != "":
157 changes[field] += '\n'
159 changes[field] += mlf.groups()[0] + '\n'
163 if signing_rules == 1 and inside_signature:
164 raise InvalidDscError, index
166 changes["filecontents"] = "".join(lines)
168 if changes.has_key("source"):
169 # Strip the source version in brackets from the source field,
170 # put it in the "source-version" field instead.
171 srcver = re_srchasver.search(changes["source"])
173 changes["source"] = srcver.group(1)
174 changes["source-version"] = srcver.group(2)
177 raise ParseChangesError, error
181 ################################################################################
183 def parse_changes(filename, signing_rules=0):
184 """Parses a changes file and returns a dictionary where each field is a
185 key. The mandatory first argument is the filename of the .changes
188 signing_rules is an optional argument:
190 o If signing_rules == -1, no signature is required.
191 o If signing_rules == 0 (the default), a signature is required.
192 o If signing_rules == 1, it turns on the same strict format checking
195 The rules for (signing_rules == 1)-mode are:
197 o The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
198 followed by any PGP header data and must end with a blank line.
200 o The data section must end with a blank line and must be followed by
201 "-----BEGIN PGP SIGNATURE-----".
204 changes_in = open_file(filename)
205 content = changes_in.read()
207 return parse_deb822(content, signing_rules)
209 ################################################################################
def hash_key(hashname):
    """Return the files-dict key under which a checksum is stored,
    e.g. "sha1" -> "sha1sum"."""
    return hashname + 'sum'
214 ################################################################################
216 def create_hash(where, files, hashname, hashfunc):
217 """create_hash extends the passed files dict with the given hash by
218 iterating over all files on disk and passing them to the hashing
222 for f in files.keys():
224 file_handle = open_file(f)
225 except CantOpenError:
226 rejmsg.append("Could not open file %s for checksumming" % (f))
228 files[f][hash_key(hashname)] = hashfunc(file_handle)
233 ################################################################################
235 def check_hash(where, files, hashname, hashfunc):
236 """check_hash checks the given hash in the files dict against the actual
237 files on disk. The hash values need to be present consistently in
238 all file entries. It does not modify its input in any way."""
241 for f in files.keys():
245 file_handle = open_file(f)
247 # Check for the hash entry, to not trigger a KeyError.
248 if not files[f].has_key(hash_key(hashname)):
249 rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
253 # Actually check the hash for correctness.
254 if hashfunc(file_handle) != files[f][hash_key(hashname)]:
255 rejmsg.append("%s: %s check failed in %s" % (f, hashname,
257 except CantOpenError:
258 # TODO: This happens when the file is in the pool.
259 # warn("Cannot open file %s" % f)
266 ################################################################################
268 def check_size(where, files):
269 """check_size checks the file sizes in the passed files dict against the
273 for f in files.keys():
278 # TODO: This happens when the file is in the pool.
282 actual_size = entry[stat.ST_SIZE]
283 size = int(files[f]["size"])
284 if size != actual_size:
285 rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
286 % (f, actual_size, size, where))
289 ################################################################################
291 def check_hash_fields(what, manifest):
292 """check_hash_fields ensures that there are no checksum fields in the
293 given dict that we do not know about."""
296 hashes = map(lambda x: x[0], known_hashes)
297 for field in manifest:
298 if field.startswith("checksums-"):
299 hashname = field.split("-",1)[1]
300 if hashname not in hashes:
301 rejmsg.append("Unsupported checksum field for %s "\
302 "in %s" % (hashname, what))
305 ################################################################################
307 def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
308 if format >= version:
309 # The version should contain the specified hash.
312 # Import hashes from the changes
313 rejmsg = parse_checksums(".changes", files, changes, hashname)
317 # We need to calculate the hash because it can't possibly
320 return func(".changes", files, hashname, hashfunc)
322 # We could add the orig which might be in the pool to the files dict to
323 # access the checksums easily.
325 def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
326 """ensure_dsc_hashes' task is to ensure that each and every *present* hash
327 in the dsc is correct, i.e. identical to the changes file and if necessary
328 the pool. The latter task is delegated to check_hash."""
331 if not dsc.has_key('Checksums-%s' % (hashname,)):
333 # Import hashes from the dsc
334 parse_checksums(".dsc", dsc_files, dsc, hashname)
336 rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
339 ################################################################################
341 def ensure_hashes(changes, dsc, files, dsc_files):
344 # Make sure we recognise the format of the Files: field in the .changes
345 format = changes.get("format", "0.0").split(".", 1)
347 format = int(format[0]), int(format[1])
349 format = int(float(format[0])), 0
351 # We need to deal with the original changes blob, as the fields we need
352 # might not be in the changes dict serialised into the .dak anymore.
353 orig_changes = parse_deb822(changes['filecontents'])
355 # Copy the checksums over to the current changes dict. This will keep
356 # the existing modifications to it intact.
357 for field in orig_changes:
358 if field.startswith('checksums-'):
359 changes[field] = orig_changes[field]
361 # Check for unsupported hashes
362 rejmsg.extend(check_hash_fields(".changes", changes))
363 rejmsg.extend(check_hash_fields(".dsc", dsc))
365 # We have to calculate the hash if we have an earlier changes version than
366 # the hash appears in rather than require it exist in the changes file
367 for hashname, hashfunc, version in known_hashes:
368 rejmsg.extend(_ensure_changes_hash(changes, format, version, files,
370 if "source" in changes["architecture"]:
371 rejmsg.extend(_ensure_dsc_hash(dsc, dsc_files, hashname,
376 def parse_checksums(where, files, manifest, hashname):
378 field = 'checksums-%s' % hashname
379 if not field in manifest:
381 for line in manifest[field].split('\n'):
384 checksum, size, checkfile = line.strip().split(' ')
385 if not files.has_key(checkfile):
386 # TODO: check for the file's entry in the original files dict, not
387 # the one modified by (auto)byhand and other weird stuff
388 # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
389 # (file, hashname, where))
391 if not files[checkfile]["size"] == size:
392 rejmsg.append("%s: size differs for files and checksums-%s entry "\
393 "in %s" % (checkfile, hashname, where))
395 files[checkfile][hash_key(hashname)] = checksum
396 for f in files.keys():
397 if not files[f].has_key(hash_key(hashname)):
398 rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
402 ################################################################################
404 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
406 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
409 # Make sure we have a Files: field to parse...
410 if not changes.has_key(field):
411 raise NoFilesFieldError
413 # Make sure we recognise the format of the Files: field
414 format = re_verwithext.search(changes.get("format", "0.0"))
416 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
418 format = format.groups()
419 if format[1] == None:
420 format = int(float(format[0])), 0, format[2]
422 format = int(format[0]), int(format[1]), format[2]
423 if format[2] == None:
427 # format = (1,0) are the only formats we currently accept,
428 # format = (0,0) are missing format headers of which we still
429 # have some in the archive.
430 if format != (1,0) and format != (0,0):
431 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
433 if (format < (1,5) or format > (1,8)):
434 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
435 if field != "files" and format < (1,8):
436 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
438 includes_section = (not is_a_dsc) and field == "files"
440 # Parse each entry/line:
441 for i in changes[field].split('\n'):
445 section = priority = ""
448 (md5, size, section, priority, name) = s
450 (md5, size, name) = s
452 raise ParseChangesError, i
459 (section, component) = extract_component_from_section(section)
461 files[name] = Dict(size=size, section=section,
462 priority=priority, component=component)
463 files[name][hashname] = md5
467 ################################################################################
469 def force_to_utf8(s):
470 """Forces a string to UTF-8. If the string isn't already UTF-8,
471 it's assumed to be ISO-8859-1."""
476 latin1_s = unicode(s,'iso8859-1')
477 return latin1_s.encode('utf-8')
479 def rfc2047_encode(s):
480 """Encodes a (header) string per RFC2047 if necessary. If the
481 string is neither ASCII nor UTF-8, it's assumed to be ISO-8859-1."""
483 codecs.lookup('ascii')[1](s)
488 codecs.lookup('utf-8')[1](s)
489 h = email.Header.Header(s, 'utf-8', 998)
492 h = email.Header.Header(s, 'iso-8859-1', 998)
495 ################################################################################
497 # <Culus> 'The standard sucks, but my tool is supposed to interoperate
498 # with it. I know - I'll fix the suckage and make things
501 def fix_maintainer (maintainer):
502 """Parses a Maintainer or Changed-By field and returns:
503 (1) an RFC822 compatible version,
504 (2) an RFC2047 compatible version,
508 The name is forced to UTF-8 for both (1) and (3). If the name field
509 contains '.' or ',' (as allowed by Debian policy), (1) and (2) are
510 switched to 'email (name)' format."""
511 maintainer = maintainer.strip()
513 return ('', '', '', '')
515 if maintainer.find("<") == -1:
518 elif (maintainer[0] == "<" and maintainer[-1:] == ">"):
519 email = maintainer[1:-1]
522 m = re_parse_maintainer.match(maintainer)
524 raise ParseMaintError, "Doesn't parse as a valid Maintainer field."
528 # Get an RFC2047 compliant version of the name
529 rfc2047_name = rfc2047_encode(name)
531 # Force the name to be UTF-8
532 name = force_to_utf8(name)
534 if name.find(',') != -1 or name.find('.') != -1:
535 rfc822_maint = "%s (%s)" % (email, name)
536 rfc2047_maint = "%s (%s)" % (email, rfc2047_name)
538 rfc822_maint = "%s <%s>" % (name, email)
539 rfc2047_maint = "%s <%s>" % (rfc2047_name, email)
541 if email.find("@") == -1 and email.find("buildd_") != 0:
542 raise ParseMaintError, "No @ found in email address part."
544 return (rfc822_maint, rfc2047_maint, name, email)
546 ################################################################################
548 # sendmail wrapper, takes _either_ a message string or a file as arguments
549 def send_mail (message, filename=""):
550 # If we've been passed a string dump it into a temporary file
552 (fd, filename) = tempfile.mkstemp()
553 os.write (fd, message)
557 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
559 raise SendmailFailedError, output
561 # Clean up any temporary files
565 ################################################################################
567 def poolify (source, component):
570 if source[:3] == "lib":
571 return component + source[:4] + '/' + source + '/'
573 return component + source[:1] + '/' + source + '/'
575 ################################################################################
577 def move (src, dest, overwrite = 0, perms = 0664):
578 if os.path.exists(dest) and os.path.isdir(dest):
581 dest_dir = os.path.dirname(dest)
582 if not os.path.exists(dest_dir):
583 umask = os.umask(00000)
584 os.makedirs(dest_dir, 02775)
586 #print "Moving %s to %s..." % (src, dest)
587 if os.path.exists(dest) and os.path.isdir(dest):
588 dest += '/' + os.path.basename(src)
589 # Don't overwrite unless forced to
590 if os.path.exists(dest):
592 fubar("Can't move %s to %s - file already exists." % (src, dest))
594 if not os.access(dest, os.W_OK):
595 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
596 shutil.copy2(src, dest)
597 os.chmod(dest, perms)
600 def copy (src, dest, overwrite = 0, perms = 0664):
601 if os.path.exists(dest) and os.path.isdir(dest):
604 dest_dir = os.path.dirname(dest)
605 if not os.path.exists(dest_dir):
606 umask = os.umask(00000)
607 os.makedirs(dest_dir, 02775)
609 #print "Copying %s to %s..." % (src, dest)
610 if os.path.exists(dest) and os.path.isdir(dest):
611 dest += '/' + os.path.basename(src)
612 # Don't overwrite unless forced to
613 if os.path.exists(dest):
615 raise FileExistsError
617 if not os.access(dest, os.W_OK):
618 raise CantOverwriteError
619 shutil.copy2(src, dest)
620 os.chmod(dest, perms)
622 ################################################################################
625 res = socket.gethostbyaddr(socket.gethostname())
626 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
627 if database_hostname:
628 return database_hostname
632 def which_conf_file ():
633 res = socket.gethostbyaddr(socket.gethostname())
634 if Cnf.get("Config::" + res[0] + "::DakConfig"):
635 return Cnf["Config::" + res[0] + "::DakConfig"]
637 return default_config
639 def which_apt_conf_file ():
640 res = socket.gethostbyaddr(socket.gethostname())
641 if Cnf.get("Config::" + res[0] + "::AptConfig"):
642 return Cnf["Config::" + res[0] + "::AptConfig"]
644 return default_apt_config
646 def which_alias_file():
647 hostname = socket.gethostbyaddr(socket.gethostname())[0]
648 aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
649 if os.path.exists(aliasfn):
654 ################################################################################
656 # Escape characters which have meaning to SQL's regex comparison operator ('~')
657 # (woefully incomplete)
660 s = s.replace('+', '\\\\+')
661 s = s.replace('.', '\\\\.')
664 ################################################################################
666 # Perform a substition of template
667 def TemplateSubst(map, filename):
668 file = open_file(filename)
669 template = file.read()
671 template = template.replace(x,map[x])
675 ################################################################################
677 def fubar(msg, exit_code=1):
678 sys.stderr.write("E: %s\n" % (msg))
682 sys.stderr.write("W: %s\n" % (msg))
684 ################################################################################
686 # Returns the user name with a laughable attempt at rfc822 conformancy
687 # (read: removing stray periods).
689 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
691 ################################################################################
701 return ("%d%s" % (c, t))
703 ################################################################################
705 def cc_fix_changes (changes):
706 o = changes.get("architecture", "")
708 del changes["architecture"]
709 changes["architecture"] = {}
711 changes["architecture"][j] = 1
713 # Sort by source name, source version, 'have source', and then by filename
714 def changes_compare (a, b):
716 a_changes = parse_changes(a)
721 b_changes = parse_changes(b)
725 cc_fix_changes (a_changes)
726 cc_fix_changes (b_changes)
728 # Sort by source name
729 a_source = a_changes.get("source")
730 b_source = b_changes.get("source")
731 q = cmp (a_source, b_source)
735 # Sort by source version
736 a_version = a_changes.get("version", "0")
737 b_version = b_changes.get("version", "0")
738 q = apt_pkg.VersionCompare(a_version, b_version)
742 # Sort by 'have source'
743 a_has_source = a_changes["architecture"].get("source")
744 b_has_source = b_changes["architecture"].get("source")
745 if a_has_source and not b_has_source:
747 elif b_has_source and not a_has_source:
750 # Fall back to sort by filename
753 ################################################################################
755 def find_next_free (dest, too_many=100):
758 while os.path.exists(dest) and extra < too_many:
759 dest = orig_dest + '.' + repr(extra)
761 if extra >= too_many:
762 raise NoFreeFilenameError
765 ################################################################################
767 def result_join (original, sep = '\t'):
769 for i in xrange(len(original)):
770 if original[i] == None:
773 list.append(original[i])
774 return sep.join(list)
776 ################################################################################
778 def prefix_multi_line_string(str, prefix, include_blank_lines=0):
780 for line in str.split('\n'):
782 if line or include_blank_lines:
783 out += "%s%s\n" % (prefix, line)
784 # Strip trailing new line
789 ################################################################################
791 def validate_changes_file_arg(filename, require_changes=1):
792 """'filename' is either a .changes or .dak file. If 'filename' is a
793 .dak file, it's changed to be the corresponding .changes file. The
794 function then checks if the .changes file a) exists and b) is
795 readable and returns the .changes filename if so. If there's a
796 problem, the next action depends on the option 'require_changes'
799 o If 'require_changes' == -1, errors are ignored and the .changes
800 filename is returned.
801 o If 'require_changes' == 0, a warning is given and 'None' is returned.
802 o If 'require_changes' == 1, a fatal error is raised.
806 orig_filename = filename
807 if filename.endswith(".dak"):
808 filename = filename[:-4]+".changes"
810 if not filename.endswith(".changes"):
811 error = "invalid file type; not a changes file"
813 if not os.access(filename,os.R_OK):
814 if os.path.exists(filename):
815 error = "permission denied"
817 error = "file not found"
820 if require_changes == 1:
821 fubar("%s: %s." % (orig_filename, error))
822 elif require_changes == 0:
823 warn("Skipping %s - %s" % (orig_filename, error))
825 else: # We only care about the .dak file
830 ################################################################################
833 return (arch != "source" and arch != "all")
835 ################################################################################
def join_with_commas_and(list):
    """Render a list of strings as an English enumeration:
    [] -> "nothing", ["a"] -> "a", ["a", "b", "c"] -> "a, b and c"."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    all_but_last = ", ".join(list[:-1])
    return all_but_last + " and " + list[-1]
842 ################################################################################
847 (pkg, version, constraint) = atom
849 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
852 pp_deps.append(pp_dep)
853 return " |".join(pp_deps)
855 ################################################################################
860 ################################################################################
862 # Handle -a, -c and -s arguments; returns them as SQL constraints
863 def parse_args(Options):
867 for suite in split_args(Options["Suite"]):
868 suite_id = database.get_suite_id(suite)
870 warn("suite '%s' not recognised." % (suite))
872 suite_ids_list.append(suite_id)
874 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
876 fubar("No valid suite given.")
881 if Options["Component"]:
882 component_ids_list = []
883 for component in split_args(Options["Component"]):
884 component_id = database.get_component_id(component)
885 if component_id == -1:
886 warn("component '%s' not recognised." % (component))
888 component_ids_list.append(component_id)
889 if component_ids_list:
890 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
892 fubar("No valid component given.")
896 # Process architecture
897 con_architectures = ""
898 if Options["Architecture"]:
901 for architecture in split_args(Options["Architecture"]):
902 if architecture == "source":
905 architecture_id = database.get_architecture_id(architecture)
906 if architecture_id == -1:
907 warn("architecture '%s' not recognised." % (architecture))
909 arch_ids_list.append(architecture_id)
911 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
914 fubar("No valid architecture given.")
918 return (con_suites, con_architectures, con_components, check_source)
920 ################################################################################
922 # Inspired(tm) by Bryn Keller's print_exc_plus (See
923 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
926 tb = sys.exc_info()[2]
935 traceback.print_exc()
937 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
938 frame.f_code.co_filename,
940 for key, value in frame.f_locals.items():
941 print "\t%20s = " % key,
945 print "<unable to print>"
947 ################################################################################
949 def try_with_debug(function):
957 ################################################################################
959 # Function for use in sorting lists of architectures.
960 # Sorts normally except that 'source' dominates all others.
962 def arch_compare_sw (a, b):
963 if a == "source" and b == "source":
972 ################################################################################
974 # Split command line arguments which can be separated by either commas
975 # or whitespace. If dwim is set, it will complain about string ending
976 # in comma since this usually means someone did 'dak ls -a i386, m68k
977 # foo' or something and the inevitable confusion resulting from 'm68k'
978 # being treated as an argument is undesirable.
980 def split_args (s, dwim=1):
981 if s.find(",") == -1:
984 if s[-1:] == "," and dwim:
985 fubar("split_args: found trailing comma, spurious space maybe?")
988 ################################################################################
990 def Dict(**dict): return dict
992 ########################################
994 # Our very own version of commands.getouputstatus(), hacked to support
996 def gpgv_get_status_output(cmd, status_read, status_write):
997 cmd = ['/bin/sh', '-c', cmd]
998 p2cread, p2cwrite = os.pipe()
999 c2pread, c2pwrite = os.pipe()
1000 errout, errin = os.pipe()
1010 for i in range(3, 256):
1011 if i != status_write:
1017 os.execvp(cmd[0], cmd)
1023 os.dup2(c2pread, c2pwrite)
1024 os.dup2(errout, errin)
1026 output = status = ""
1028 i, o, e = select.select([c2pwrite, errin, status_read], [], [])
1031 r = os.read(fd, 8196)
1033 more_data.append(fd)
1034 if fd == c2pwrite or fd == errin:
1036 elif fd == status_read:
1039 fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
1041 pid, exit_status = os.waitpid(pid, 0)
1043 os.close(status_write)
1044 os.close(status_read)
1054 return output, status, exit_status
1056 ################################################################################
1058 def process_gpgv_output(status):
1059 # Process the status-fd output
1062 for line in status.split('\n'):
1066 split = line.split()
1068 internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
1070 (gnupg, keyword) = split[:2]
1071 if gnupg != "[GNUPG:]":
1072 internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
1075 if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
1076 internal_error += "found duplicate status token ('%s').\n" % (keyword)
1079 keywords[keyword] = args
1081 return (keywords, internal_error)
1083 ################################################################################
1085 def retrieve_key (filename, keyserver=None, keyring=None):
1086 """Retrieve the key that signed 'filename' from 'keyserver' and
1087 add it to 'keyring'. Returns nothing on success, or an error message
1090 # Defaults for keyserver and keyring
1092 keyserver = Cnf["Dinstall::KeyServer"]
1094 keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
1096 # Ensure the filename contains no shell meta-characters or other badness
1097 if not re_taint_free.match(filename):
1098 return "%s: tainted filename" % (filename)
1100 # Invoke gpgv on the file
1101 status_read, status_write = os.pipe();
1102 cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1103 (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1105 # Process the status-fd output
1106 (keywords, internal_error) = process_gpgv_output(status)
1108 return internal_error
1110 if not keywords.has_key("NO_PUBKEY"):
1111 return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1113 fingerprint = keywords["NO_PUBKEY"][0]
1114 # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
1115 # it'll try to create a lockfile in /dev. A better solution might
1116 # be a tempfile or something.
1117 cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1118 % (Cnf["Dinstall::SigningKeyring"])
1119 cmd += " --keyring %s --keyserver %s --recv-key %s" \
1120 % (keyring, keyserver, fingerprint)
1121 (result, output) = commands.getstatusoutput(cmd)
1123 return "'%s' failed with exit code %s" % (cmd, result)
1127 ################################################################################
1129 def gpg_keyring_args(keyrings=None):
1131 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1133 return " ".join(["--keyring %s" % x for x in keyrings])
1135 ################################################################################
1137 def check_signature (sig_filename, reject, data_filename="", keyrings=None, autofetch=None):
1138 """Check the signature of a file and return the fingerprint if the
1139 signature is valid or 'None' if it's not. The first argument is the
1140 filename whose signature should be checked. The second argument is a
1141 reject function and is called when an error is found. The reject()
1142 function must allow for two arguments: the first is the error message,
1143 the second is an optional prefix string. It's possible for reject()
1144 to be called more than once during an invocation of check_signature().
1145 The third argument is optional and is the name of the files the
1146 detached signature applies to. The fourth argument is optional and is
1147 a *list* of keyrings to use. 'autofetch' can either be None, True or
1148 False. If None, the default behaviour specified in the config will be
1151 # Ensure the filename contains no shell meta-characters or other badness
1152 if not re_taint_free.match(sig_filename):
1153 reject("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
1156 if data_filename and not re_taint_free.match(data_filename):
1157 reject("!!WARNING!! tainted data filename: '%s'." % (data_filename))
1161 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1163 # Autofetch the signing key if that's enabled
1164 if autofetch == None:
1165 autofetch = Cnf.get("Dinstall::KeyAutoFetch")
1167 error_msg = retrieve_key(sig_filename)
1172 # Build the command line
1173 status_read, status_write = os.pipe();
1174 cmd = "gpgv --status-fd %s %s %s %s" % (
1175 status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)
1177 # Invoke gpgv on the file
1178 (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)
1180 # Process the status-fd output
1181 (keywords, internal_error) = process_gpgv_output(status)
1183 # If we failed to parse the status-fd output, let's just whine and bail now
1185 reject("internal error while performing signature check on %s." % (sig_filename))
1186 reject(internal_error, "")
1187 reject("Please report the above errors to the Archive maintainers by replying to this mail.", "")
1191 # Now check for obviously bad things in the processed output
1192 if keywords.has_key("KEYREVOKED"):
1193 reject("The key used to sign %s has been revoked." % (sig_filename))
1195 if keywords.has_key("BADSIG"):
1196 reject("bad signature on %s." % (sig_filename))
1198 if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
1199 reject("failed to check signature on %s." % (sig_filename))
1201 if keywords.has_key("NO_PUBKEY"):
1202 args = keywords["NO_PUBKEY"]
1205 reject("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
1207 if keywords.has_key("BADARMOR"):
1208 reject("ASCII armour of signature was corrupt in %s." % (sig_filename))
1210 if keywords.has_key("NODATA"):
1211 reject("no signature found in %s." % (sig_filename))
1213 if keywords.has_key("EXPKEYSIG"):
1214 args = keywords["EXPKEYSIG"]
1217 reject("Signature made by expired key 0x%s" % (key))
1219 if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
1220 args = keywords["KEYEXPIRED"]
1224 if timestamp.count("T") == 0:
1225 expiredate = time.strftime("%Y-%m-%d", time.gmtime(timestamp))
1227 expiredate = timestamp
1228 reject("The key used to sign %s has expired on %s" % (sig_filename, expiredate))
1234 # Next check gpgv exited with a zero return code
1236 reject("gpgv failed while checking %s." % (sig_filename))
1238 reject(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
1240 reject(prefix_multi_line_string(output, " [GPG output:] "), "")
1243 # Sanity check the good stuff we expect
1244 if not keywords.has_key("VALIDSIG"):
1245 reject("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
1248 args = keywords["VALIDSIG"]
1250 reject("internal error while checking signature on %s." % (sig_filename))
1253 fingerprint = args[0]
1254 if not keywords.has_key("GOODSIG"):
1255 reject("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
1257 if not keywords.has_key("SIG_ID"):
1258 reject("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))
1261 # Finally ensure there's not something we don't recognise
1262 known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
1263 SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
1264 NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")
1266 for keyword in keywords.keys():
1267 if not known_keywords.has_key(keyword):
1268 reject("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
1276 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """Retrieve email addresses from gpg key uids for a given fingerprint.

    Results are memoised in key_uid_email_cache, so gpg is invoked at
    most once per fingerprint for the lifetime of the process.  Returns
    a set of address strings (possibly empty if gpg fails or the key
    has no matching uids).
    """
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses is not None:
        # Cache hit - avoid forking gpg again.
        return addresses
    addresses = set()
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
          % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    if result == 0:
        # Each uid line matching re_gpg_uid contributes one address.
        for l in output.split('\n'):
            m = re_gpg_uid.match(l)
            if m:
                addresses.add(m.group(1))
    # Cache even an empty result so a broken key is only probed once.
    key_uid_email_cache[fingerprint] = addresses
    return addresses
1295 ################################################################################
1297 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """Greedily word-wrap 'paragraph' at 'max_length' columns.

    Continuation lines are prefixed with 'prefix'.  A word longer than
    'max_length' is placed on a line of its own rather than being
    split.  Returns the wrapped text as a single string.
    """
    s = ""
    line = ""
    for word in paragraph.split():
        word_size = len(word)
        if word_size > max_length:
            # Overlong word: flush any pending line, then emit the word
            # unbroken on its own line.
            if line:
                s += line + '\n' + prefix
            s += word + '\n' + prefix
            line = ""
        elif not line:
            line = word
        elif len(line) + word_size + 1 > max_length:
            # Word doesn't fit (+1 for the joining space): start a new line.
            s += line + '\n' + prefix
            line = word
        else:
            line += ' ' + word
    if line:
        s += line
    return s
1328 ################################################################################
1330 # Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
1331 # Returns fixed 'src'
def clean_symlink (src, dest, root):
    """Relativize the absolute symlink 'src' -> 'dest' with respect to
    'root' and return the fixed 'src' path."""
    # Strip the leading 'root' component from both endpoints.
    rel_src = src.replace(root, '', 1)
    rel_dest = dest.replace(root, '', 1)
    # One '../' per directory component of the link's containing directory.
    depth = len(os.path.dirname(rel_dest).split('/'))
    return ('../' * depth) + rel_src
1339 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """Return a secure and unique filename by pre-creating it.

    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pair (fd, name) as produced by tempfile.mkstemp(); the
    caller is responsible for closing the fd and removing the file.
    """
    return tempfile.mkstemp(suffix, prefix, directory)
1352 ################################################################################
1354 # checks if the user part of the email is listed in the alias file
def is_email_alias(email):
    """Check whether the user part of 'email' is listed in the alias file.

    The alias file is parsed lazily on first call and the set of alias
    names cached in the module-level 'alias_cache'.
    """
    global alias_cache
    if alias_cache is None:
        alias_cache = set()
        aliasfn = which_alias_file()
        if aliasfn:
            # Alias file lines look like "name: target"; only the name
            # part matters here.  Close the handle explicitly rather
            # than leaking it.
            f = open(aliasfn)
            try:
                for l in f:
                    alias_cache.add(l.split(':')[0])
            finally:
                f.close()
    uid = email.split('@')[0]
    return uid in alias_cache
1367 ################################################################################
1371 Cnf = apt_pkg.newConfiguration()
1372 apt_pkg.ReadConfigFileISC(Cnf,default_config)
1374 if which_conf_file() != default_config:
1375 apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1377 ################################################################################