2 # vim:set et ts=4 sw=4:
5 # Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
7 ################################################################################
9 # This program is free software; you can redistribute it and/or modify
10 # it under the terms of the GNU General Public License as published by
11 # the Free Software Foundation; either version 2 of the License, or
12 # (at your option) any later version.
14 # This program is distributed in the hope that it will be useful,
15 # but WITHOUT ANY WARRANTY; without even the implied warranty of
16 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 # GNU General Public License for more details.
19 # You should have received a copy of the GNU General Public License
20 # along with this program; if not, write to the Free Software
21 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
23 ################################################################################
25 import codecs, commands, email.Header, os, pwd, re, select, socket, shutil, \
26 sys, tempfile, traceback, stat
29 from dak_exceptions import *
31 ################################################################################
# Pre-compiled regular expressions used throughout this module.
re_comments = re.compile(r"\#.*")
re_no_epoch = re.compile(r"^\d+\:")
re_no_revision = re.compile(r"-[^-]+$")
re_arch_from_filename = re.compile(r"/binary-[^/]+/")
re_extract_src_version = re.compile (r"(\S+)\s*\((.*)\)")
re_isadeb = re.compile (r"(.+?)_(.+?)_(.+)\.u?deb$")
re_issource = re.compile (r"(.+)_(.+?)\.(orig\.tar\.gz|diff\.gz|tar\.gz|dsc)$")

re_single_line_field = re.compile(r"^(\S*)\s*:\s*(.*)")
re_multi_line_field = re.compile(r"^\s(.*)")
re_taint_free = re.compile(r"^[-+~/\.\w]+$")

re_parse_maintainer = re.compile(r"^\s*(\S.*\S)\s*\<([^\>]+)\>")
re_gpg_uid = re.compile('^uid.*<([^>]*)>')

re_srchasver = re.compile(r"^(\S+)\s+\((\S+)\)$")
re_verwithext = re.compile(r"^(\d+)(?:\.(\d+))(?:\s+\((\S+)\))?$")

# NOTE(review): re_srchasver is assigned twice with an identical pattern;
# this second assignment is redundant.
re_srchasver = re.compile(r"^(\S+)\s+\((\S+)\)$")

# Default configuration file locations.
default_config = "/etc/dak/dak.conf"
default_apt_config = "/etc/dak/apt.conf"

# Cache mapping key fingerprints to uid/email data, filled lazily.
key_uid_email_cache = {}

# (hashname, function, earliest_changes_version)
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
                ("sha256", apt_pkg.sha256sum, (1, 8))]
63 ################################################################################
def open_file(filename, mode='r'):
    """Open `filename' with `mode' and return the file handle.

    Raises CantOpenError (instead of letting IOError escape) when the
    file cannot be opened.  The visible chunk had lost the try/except
    and the return, leaving an unconditional raise; restored here.
    """
    try:
        f = open(filename, mode)
    except IOError:
        raise CantOpenError(filename)
    return f
72 ################################################################################
def our_raw_input(prompt=""):
    # Prompt on stdout, then read a line of input from the user.
    sys.stdout.write(prompt)
    # NOTE(review): the flush, the actual input call and the EOF
    # try/except that prints the message below are missing from this
    # chunk; only the EOF message remains visible.
    sys.stderr.write("\nUser interrupt (^D).\n")
85 ################################################################################
def extract_component_from_section(section):
    # Given a Debian "Section" value such as "contrib/net", return a
    # (section, component) tuple; a "component/" prefix determines the
    # component.
    # NOTE(review): the initial component assignment and the has_key
    # branch body are missing from this chunk.
    if section.find('/') != -1:
        component = section.split('/')[0]
    # Expand default component
    if Cnf.has_key("Component::%s" % section):
    return (section, component)
102 ################################################################################
def parse_deb822(contents, signing_rules=0):
    # Parse an RFC822-ish (deb822) blob into a dict mapping lowercased
    # field names to values; PGP armour handling is controlled by
    # signing_rules (see parse_changes for the exact semantics).
    # NOTE(review): this chunk is missing many lines of the original
    # function (variable initialisations, loop counters, continue/break
    # statements and some branch bodies); the visible code is reproduced
    # as-is and does not stand alone.

    # Split the lines in the input, keeping the linebreaks.
    lines = contents.splitlines(True)
    raise ParseChangesError, "[Empty changes file]"

    # Reindex by line number so we can easily verify the format of
    indexed_lines[index] = line[:-1]

    num_of_lines = len(indexed_lines.keys())
    while index < num_of_lines:
        line = indexed_lines[index]
        if signing_rules == 1:
            # Strict mode: a blank line must be immediately followed by
            # the PGP signature armour, else the .dsc is malformed.
            if index > num_of_lines:
                raise InvalidDscError, index
            line = indexed_lines[index]
            if not line.startswith("-----BEGIN PGP SIGNATURE"):
                raise InvalidDscError, index
        if line.startswith("-----BEGIN PGP SIGNATURE"):
        if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
            if signing_rules == 1:
                # Skip the PGP header block up to the first blank line.
                while index < num_of_lines and line != "":
                    line = indexed_lines[index]
        # If we're not inside the signed data, don't process anything
        if signing_rules >= 0 and not inside_signature:
        slf = re_single_line_field.match(line)
        field = slf.groups()[0].lower()
        changes[field] = slf.groups()[1]
        changes[field] += '\n'
        mlf = re_multi_line_field.match(line)
        raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
        if first == 1 and changes[field] != "":
            changes[field] += '\n'
        changes[field] += mlf.groups()[0] + '\n'

    if signing_rules == 1 and inside_signature:
        raise InvalidDscError, index
    changes["filecontents"] = "".join(lines)

    if changes.has_key("source"):
        # Strip the source version in brackets from the source field,
        # put it in the "source-version" field instead.
        srcver = re_srchasver.search(changes["source"])
        changes["source"] = srcver.group(1)
        changes["source-version"] = srcver.group(2)

    # Raised with the accumulated unparseable lines on failure.
    raise ParseChangesError, error
192 ################################################################################
def parse_changes(filename, signing_rules=0):
    """Parses a changes file and returns a dictionary where each field is a
    key. The mandatory first argument is the filename of the .changes
    file.

    signing_rules is an optional argument:

    o If signing_rules == -1, no signature is required.
    o If signing_rules == 0 (the default), a signature is required.
    o If signing_rules == 1, it turns on the same strict format checking
      used for .dsc files.

    The rules for (signing_rules == 1)-mode are:

    o The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
      followed by any PGP header data and must end with a blank line.

    o The data section must end with a blank line and must be followed by
      "-----BEGIN PGP SIGNATURE-----".
    """
    changes_in = open_file(filename)
    content = changes_in.read()
    # NOTE(review): the original presumably closes changes_in before
    # returning; that line is missing from this chunk.
    return parse_deb822(content, signing_rules)
220 ################################################################################
def hash_key(hashname):
    """Map a hash name (e.g. "md5") to the files-dict key under which
    its value is stored (e.g. "md5sum")."""
    return hashname + 'sum'
225 ################################################################################
def create_hash(where, files, hashname, hashfunc):
    """create_hash extends the passed files dict with the given hash by
    iterating over all files on disk and passing them to the hashing
    function given.  Files that cannot be opened are collected as
    rejection messages."""
    # NOTE(review): the rejmsg initialisation, the "try:" opening the
    # except-block below, a continue and the final return are missing
    # from this chunk.
    for f in files.keys():
        file_handle = open_file(f)
        except CantOpenError:
            rejmsg.append("Could not open file %s for checksumming" % (f))
        files[f][hash_key(hashname)] = hashfunc(file_handle)
244 ################################################################################
def check_hash(where, files, hashname, hashfunc):
    """check_hash checks the given hash in the files dict against the actual
    files on disk. The hash values need to be present consistently in
    all file entries. It does not modify its input in any way."""
    # NOTE(review): the rejmsg initialisation, the "try:" for the
    # except-clause below, the string continuations of both append()
    # calls, the except body and the final return are missing from this
    # chunk.
    for f in files.keys():
        file_handle = open_file(f)
        # Check for the hash entry, to not trigger a KeyError.
        if not files[f].has_key(hash_key(hashname)):
            rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
        # Actually check the hash for correctness.
        if hashfunc(file_handle) != files[f][hash_key(hashname)]:
            rejmsg.append("%s: %s check failed in %s" % (f, hashname,
        except CantOpenError:
        # TODO: This happens when the file is in the pool.
        # warn("Cannot open file %s" % f)
277 ################################################################################
def check_size(where, files):
    """check_size checks the file sizes in the passed files dict against the
    actual sizes on disk, collecting a rejection message per mismatch."""
    # NOTE(review): the rejmsg initialisation, the os.stat() call with
    # its error handling, and the final return are missing from this
    # chunk.
    for f in files.keys():
        # TODO: This happens when the file is in the pool.
        actual_size = entry[stat.ST_SIZE]
        size = int(files[f]["size"])
        if size != actual_size:
            rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
                          % (f, actual_size, size, where))
300 ################################################################################
def check_hash_fields(what, manifest):
    """check_hash_fields ensures that there are no checksum fields in the
    given dict that we do not know about.

    Returns a list of rejection messages, one per unsupported
    "checksums-*" field found in `manifest' (named by `what' in the
    message).  The visible chunk had lost the rejmsg initialisation and
    the final return; restored here.
    """
    rejmsg = []
    # A list (not a map() iterator) so it can be probed repeatedly.
    hashes = [x[0] for x in known_hashes]
    for field in manifest:
        if field.startswith("checksums-"):
            hashname = field.split("-", 1)[1]
            if hashname not in hashes:
                rejmsg.append("Unsupported checksum field for %s "
                              "in %s" % (hashname, what))
    return rejmsg
316 ################################################################################
def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
    # Ensure the given hash is available for every file: for changes
    # formats >= `version' it is imported from the .changes itself,
    # otherwise it has to be computed from disk.
    # NOTE(review): the return in the first branch, the else branch and
    # the definition of "func" are missing from this chunk.
    if format >= version:
        # The version should contain the specified hash.
        # Import hashes from the changes
        rejmsg = parse_checksums(".changes", files, changes, hashname)
    # We need to calculate the hash because it can't possibly
    # be present in a changes file this old.
    return func(".changes", files, hashname, hashfunc)
# We could add the orig which might be in the pool to the files dict to
# access the checksums easily.

def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
    """ensure_dsc_hashes' task is to ensure that each and every *present* hash
    in the dsc is correct, i.e. identical to the changes file and if necessary
    the pool. The latter task is delegated to check_hash."""
    # NOTE(review): the rejmsg initialisation, the body of the guard
    # below (presumably an early return when the field is absent) and
    # the final return are missing from this chunk.
    if not dsc.has_key('Checksums-%s' % (hashname,)):
    # Import hashes from the dsc
    parse_checksums(".dsc", dsc_files, dsc, hashname)
    rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
350 ################################################################################
def ensure_hashes(changes, dsc, files, dsc_files):
    # Validate every known checksum for both the .changes and the .dsc,
    # accumulating rejection messages.
    # NOTE(review): the rejmsg initialisation, the try/except around the
    # format parsing, several call continuations and the final return
    # are missing from this chunk.

    # Make sure we recognise the format of the Files: field in the .changes
    format = changes.get("format", "0.0").split(".", 1)
    format = int(format[0]), int(format[1])
    format = int(float(format[0])), 0

    # We need to deal with the original changes blob, as the fields we need
    # might not be in the changes dict serialised into the .dak anymore.
    orig_changes = parse_deb822(changes['filecontents'])

    # Copy the checksums over to the current changes dict. This will keep
    # the existing modifications to it intact.
    for field in orig_changes:
        if field.startswith('checksums-'):
            changes[field] = orig_changes[field]

    # Check for unsupported hashes
    rejmsg.extend(check_hash_fields(".changes", changes))
    rejmsg.extend(check_hash_fields(".dsc", dsc))

    # We have to calculate the hash if we have an earlier changes version than
    # the hash appears in rather than require it exist in the changes file
    for hashname, hashfunc, version in known_hashes:
        rejmsg.extend(_ensure_changes_hash(changes, format, version, files,
        if "source" in changes["architecture"]:
            rejmsg.extend(_ensure_dsc_hash(dsc, dsc_files, hashname,
def parse_checksums(where, files, manifest, hashname):
    # Parse a "checksums-<hash>" field out of `manifest' and record the
    # hash for each listed file into `files'; collect rejection messages
    # for missing files, size mismatches and files lacking an entry.
    # NOTE(review): the rejmsg initialisation, blank-line skipping inside
    # the loop, the message continuation and the final return are missing
    # from this chunk.
    field = 'checksums-%s' % hashname
    if not field in manifest:
    input = manifest[field]
    for line in input.split('\n'):
        hash, size, file = line.strip().split(' ')
        if not files.has_key(file):
            rejmsg.append("%s: not present in files but in checksums-%s in %s" %
                          (file, hashname, where))
        # NB: string/int comparison kept as in the original.
        if not files[file]["size"] == size:
            rejmsg.append("%s: size differs for files and checksums-%s entry "\
                          "in %s" % (file, hashname, where))
        files[file][hash_key(hashname)] = hash
    for f in files.keys():
        if not files[f].has_key(hash_key(hashname)):
            rejmsg.append("%s: no entry in checksums-%s in %s" % (file,
410 ################################################################################
# Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl

def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
    # Parse the Files:/Checksums-* field of a .changes or .dsc into a
    # dict keyed by filename, after validating the Format version.
    # NOTE(review): the files-dict initialisation, the per-line split
    # into "s", several guards around the raise statements and the final
    # return are missing from this chunk.

    # Make sure we have a Files: field to parse...
    if not changes.has_key(field):
        raise NoFilesFieldError

    # Make sure we recognise the format of the Files: field
    format = re_verwithext.search(changes.get("format", "0.0"))
    raise UnknownFormatError, "%s" % (changes.get("format","0.0"))

    format = format.groups()
    if format[1] == None:
        format = int(float(format[0])), 0, format[2]
    format = int(format[0]), int(format[1]), format[2]
    if format[2] == None:

    raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
    if (format < (1,5) or format > (1,8)):
        raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
    if field != "files" and format < (1,8):
        raise UnknownFormatError, "%s" % (changes.get("format","0.0"))

    # Only binary uploads carry section/priority columns.
    includes_section = (not is_a_dsc) and field == "files"

    # Parse each entry/line:
    for i in changes[field].split('\n'):
        section = priority = ""
        (md5, size, section, priority, name) = s
        (md5, size, name) = s
        raise ParseChangesError, i

        (section, component) = extract_component_from_section(section)

        files[name] = Dict(size=size, section=section,
                           priority=priority, component=component)
        files[name][hashname] = md5
472 ################################################################################
def force_to_utf8(s):
    """Forces a string to UTF-8. If the string isn't already UTF-8,
    it's assumed to be ISO-8859-1."""
    # NOTE(review): the try/except validating the string as UTF-8 (and
    # returning it unchanged on success) is missing from this chunk;
    # only the ISO-8859-1 fallback remains visible.
    latin1_s = unicode(s,'iso8859-1')
    return latin1_s.encode('utf-8')
def rfc2047_encode(s):
    """Encodes a (header) string per RFC2047 if necessary. If the
    string is neither ASCII nor UTF-8, it's assumed to be ISO-8859-1."""
    # NOTE(review): the try/except chain falling through the three
    # encodings, and the returns of the encoded header, are missing from
    # this chunk.
    codecs.lookup('ascii')[1](s)
    codecs.lookup('utf-8')[1](s)
    h = email.Header.Header(s, 'utf-8', 998)
    h = email.Header.Header(s, 'iso-8859-1', 998)
500 ################################################################################
# <Culus> 'The standard sucks, but my tool is supposed to interoperate
#          with it. I know - I'll fix the suckage and make things
#          even worse!'

def fix_maintainer (maintainer):
    """Parses a Maintainer or Changed-By field and returns:
    (1) an RFC822 compatible version,
    (2) an RFC2047 compatible version,
    (3) the name, and
    (4) the email address.

    The name is forced to UTF-8 for both (1) and (3). If the name field
    contains '.' or ',' (as allowed by Debian policy), (1) and (2) are
    switched to 'email (name)' format."""
    # NOTE(review): the empty-input guard, the name/email assignments in
    # the first two branches, the parse-failure guard and the else of
    # the comma/dot check are missing from this chunk.
    maintainer = maintainer.strip()
    return ('', '', '', '')

    if maintainer.find("<") == -1:
    elif (maintainer[0] == "<" and maintainer[-1:] == ">"):
        email = maintainer[1:-1]
    m = re_parse_maintainer.match(maintainer)
    raise ParseMaintError, "Doesn't parse as a valid Maintainer field."

    # Get an RFC2047 compliant version of the name
    rfc2047_name = rfc2047_encode(name)

    # Force the name to be UTF-8
    name = force_to_utf8(name)

    if name.find(',') != -1 or name.find('.') != -1:
        rfc822_maint = "%s (%s)" % (email, name)
        rfc2047_maint = "%s (%s)" % (email, rfc2047_name)
    rfc822_maint = "%s <%s>" % (name, email)
    rfc2047_maint = "%s <%s>" % (rfc2047_name, email)

    # Sanity-check the email part (buildd_* addresses are exempt).
    if email.find("@") == -1 and email.find("buildd_") != 0:
        raise ParseMaintError, "No @ found in email address part."

    return (rfc822_maint, rfc2047_maint, name, email)
551 ################################################################################
# sendmail wrapper, takes _either_ a message string or a file as arguments
def send_mail (message, filename=""):
    # Pipe `filename' (or a temporary file holding `message') into the
    # configured Dinstall::SendmailCommand; raises SendmailFailedError
    # on a non-zero exit.
    # NOTE(review): the guard around the temp-file creation, os.close(),
    # the result check before the raise and the temp-file cleanup are
    # missing from this chunk.

    # If we've been passed a string dump it into a temporary file
    filename = tempfile.mktemp()
    fd = os.open(filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0700)
    os.write (fd, message)

    (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
    raise SendmailFailedError, output

    # Clean up any temporary files
571 ################################################################################
def poolify (source, component):
    """Return the pool subdirectory path for `source' in `component'.

    Sources named "lib*" are filed under a "libX/" prefix directory
    (e.g. "libfoo" -> "main/libf/libfoo/"); everything else under the
    first letter (e.g. "foo" -> "main/f/foo/").  An empty component
    yields no leading component path.
    """
    # Append the path separator to a non-empty component; the visible
    # chunk had lost this step, which glued the component directly onto
    # the prefix directory ("mainf/foo/" instead of "main/f/foo/").
    if component:
        component += '/'
    if source[:3] == "lib":
        return component + source[:4] + '/' + source + '/'
    else:
        return component + source[:1] + '/' + source + '/'
581 ################################################################################
def move (src, dest, overwrite = 0, perms = 0664):
    # Move `src' to `dest' (copy + chmod; the unlink of the source is in
    # the missing tail), creating the destination directory mode 02775
    # if needed.  Refuses to clobber an existing file unless `overwrite'.
    # NOTE(review): the dest_dir assignment in the first branch, the
    # umask restore, the "if not overwrite:" structure around the two
    # fubar() calls and the final os.unlink(src) are missing from this
    # chunk.
    if os.path.exists(dest) and os.path.isdir(dest):
    dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Moving %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
        fubar("Can't move %s to %s - file already exists." % (src, dest))
        if not os.access(dest, os.W_OK):
            fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
def copy (src, dest, overwrite = 0, perms = 0664):
    # Copy `src' to `dest', mirroring move() above but raising
    # FileExistsError/CantOverwriteError instead of calling fubar().
    # NOTE(review): as with move(), the dest_dir assignment for the
    # directory case, the umask restore and the "if not overwrite:"
    # structure around the two raises are missing from this chunk.
    if os.path.exists(dest) and os.path.isdir(dest):
    dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Copying %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
        raise FileExistsError
        if not os.access(dest, os.W_OK):
            raise CantOverwriteError
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
628 ################################################################################
# NOTE(review): the enclosing function definition (a "where am I"-style
# helper) is missing from this chunk; these lines resolve the local
# hostname and prefer a host-specific DatabaseHostname from the config.
    res = socket.gethostbyaddr(socket.gethostname())
    database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
    if database_hostname:
        return database_hostname
def which_conf_file ():
    """Return the dak configuration file for this host.

    A Config::<hostname>::DakConfig entry for the local hostname takes
    precedence; otherwise the module-wide default_config is used.
    """
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    host_specific = Cnf.get("Config::" + hostname + "::DakConfig")
    if host_specific:
        return host_specific
    return default_config
def which_apt_conf_file ():
    """Return the apt configuration file for this host.

    A Config::<hostname>::AptConfig entry for the local hostname takes
    precedence; otherwise the module-wide default_apt_config is used.
    """
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    host_specific = Cnf.get("Config::" + hostname + "::AptConfig")
    if host_specific:
        return host_specific
    return default_apt_config
def which_alias_file():
    # Return the host-specific forward-alias file when it exists.
    # NOTE(review): the return of aliasfn and the fallback for a missing
    # file are not visible in this chunk.
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
    if os.path.exists(aliasfn):
660 ################################################################################
# Escape characters which have meaning to SQL's regex comparison operator ('~')
# (woefully incomplete)
# NOTE(review): the enclosing function definition and its return are
# missing from this chunk; only the two replacements remain.
    s = s.replace('+', '\\\\+')
    s = s.replace('.', '\\\\.')
670 ################################################################################
# Perform a substitution of template
def TemplateSubst(map, filename):
    # Read the template from `filename' and replace each key of `map'
    # with its value.
    # NOTE(review): the loop header over map's keys, the file close and
    # the return of the substituted template are missing from this chunk.
    file = open_file(filename)
    template = file.read()
    template = template.replace(x,map[x])
681 ################################################################################
def fubar(msg, exit_code=1):
    # Print a fatal error; the sys.exit(exit_code) call is missing from
    # this chunk.
    sys.stderr.write("E: %s\n" % (msg))

# NOTE(review): the "def warn(msg):" line for the following body is
# missing from this chunk.
    sys.stderr.write("W: %s\n" % (msg))
690 ################################################################################
# Returns the user name with a laughable attempt at rfc822 conformancy
# (read: removing stray periods).
# NOTE(review): the enclosing function definition is missing from this
# chunk; the body reads the GECOS field of the current user.
    return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
697 ################################################################################
707 return ("%d%s" % (c, t))
709 ################################################################################
def cc_fix_changes (changes):
    # Replace the space-separated Architecture string with a dict keyed
    # by architecture name (value 1), for cheap membership tests.
    # NOTE(review): the guard on `o' and the loop header over its split
    # are missing from this chunk.
    o = changes.get("architecture", "")
    del changes["architecture"]
    changes["architecture"] = {}
    changes["architecture"][j] = 1
# Sort by source name, source version, 'have source', and then by filename
def changes_compare (a, b):
    # cmp()-style comparator for two .changes filenames.
    # NOTE(review): the try/except fallbacks around both parse_changes()
    # calls, the "if q:/return q" short-circuits after each comparison
    # stage and the final filename comparison are missing from this
    # chunk.
    a_changes = parse_changes(a)
    b_changes = parse_changes(b)

    cc_fix_changes (a_changes)
    cc_fix_changes (b_changes)

    # Sort by source name
    a_source = a_changes.get("source")
    b_source = b_changes.get("source")
    q = cmp (a_source, b_source)

    # Sort by source version
    a_version = a_changes.get("version", "0")
    b_version = b_changes.get("version", "0")
    q = apt_pkg.VersionCompare(a_version, b_version)

    # Sort by 'have source'
    a_has_source = a_changes["architecture"].get("source")
    b_has_source = b_changes["architecture"].get("source")
    if a_has_source and not b_has_source:
    elif b_has_source and not a_has_source:

    # Fall back to sort by filename
759 ################################################################################
def find_next_free (dest, too_many=100):
    # Find a free filename by appending ".1", ".2", ... to `dest';
    # raises NoFreeFilenameError after `too_many' attempts.
    # NOTE(review): the initialisation of extra/orig_dest, the increment
    # inside the loop and the final return of dest are missing from this
    # chunk.
    while os.path.exists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
    if extra >= too_many:
        raise NoFreeFilenameError
771 ################################################################################
def result_join (original, sep = '\t'):
    # Join a result row with `sep', mapping None entries to "".
    # NOTE(review): the list initialisation and the branch appending ""
    # for None entries are missing from this chunk.
    for i in xrange(len(original)):
        if original[i] == None:
        list.append(original[i])
    return sep.join(list)
782 ################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    # Prefix every line of `str' with `prefix'; blank lines are dropped
    # unless include_blank_lines is set.
    # NOTE(review): the accumulator initialisation, the per-line strip
    # and the final return (minus trailing newline) are missing from
    # this chunk.
    for line in str.split('\n'):
        if line or include_blank_lines:
            out += "%s%s\n" % (prefix, line)
    # Strip trailing new line
795 ################################################################################
def validate_changes_file_arg(filename, require_changes=1):
    """'filename' is either a .changes or .dak file. If 'filename' is a
    .dak file, it's changed to be the corresponding .changes file. The
    function then checks if the .changes file a) exists and b) is
    readable and returns the .changes filename if so. If there's a
    problem, the next action depends on the option 'require_changes'
    (which defaults to 1):

    o If 'require_changes' == -1, errors are ignored and the .changes
      filename is returned.
    o If 'require_changes' == 0, a warning is given and 'None' is returned.
    o If 'require_changes' == 1, a fatal error is raised.
    """
    # NOTE(review): the `error' initialisation, the else around
    # "file not found", the no-error early return and the returns of the
    # three require_changes outcomes are missing from this chunk.
    orig_filename = filename
    if filename.endswith(".dak"):
        filename = filename[:-4]+".changes"

    if not filename.endswith(".changes"):
        error = "invalid file type; not a changes file"
    if not os.access(filename,os.R_OK):
        if os.path.exists(filename):
            error = "permission denied"
        error = "file not found"

    if require_changes == 1:
        fubar("%s: %s." % (orig_filename, error))
    elif require_changes == 0:
        warn("Skipping %s - %s" % (orig_filename, error))
    else: # We only care about the .dak file
836 ################################################################################
# NOTE(review): tail of a real-architecture predicate; the def line is
# missing from this chunk.  True for any concrete architecture, i.e.
# anything other than "source" and "all".
    return (arch != "source" and arch != "all")
841 ################################################################################
def join_with_commas_and(list):
    """Render a list as English prose: "nothing" for an empty list,
    the lone element for a singleton, else "a, b and c"."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    head = ", ".join(list[:-1])
    return head + " and " + list[-1]
848 ################################################################################
# NOTE(review): body of a dependency pretty-printer; its def line, the
# accumulator initialisation and the branch for atoms without a version
# constraint are missing from this chunk.
    (pkg, version, constraint) = atom
    pp_dep = "%s (%s %s)" % (pkg, constraint, version)
    pp_deps.append(pp_dep)
    return " |".join(pp_deps)
861 ################################################################################
866 ################################################################################
# Handle -a, -c and -s arguments; returns them as SQL constraints
def parse_args(Options):
    # Build "AND x.id IN (...)" SQL fragments for the suite, component
    # and architecture options, plus a flag for whether source was
    # requested.
    # NOTE(review): list initialisations, "continue" statements after
    # the warn() calls, the guards around the fubar() calls and the
    # check_source bookkeeping are missing from this chunk.

    # Process suite
    for suite in split_args(Options["Suite"]):
        suite_id = database.get_suite_id(suite)
        warn("suite '%s' not recognised." % (suite))
        suite_ids_list.append(suite_id)
    con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
    fubar("No valid suite given.")

    # Process component
    if Options["Component"]:
        component_ids_list = []
        for component in split_args(Options["Component"]):
            component_id = database.get_component_id(component)
            if component_id == -1:
                warn("component '%s' not recognised." % (component))
            component_ids_list.append(component_id)
        if component_ids_list:
            con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
        fubar("No valid component given.")

    # Process architecture
    con_architectures = ""
    if Options["Architecture"]:
        for architecture in split_args(Options["Architecture"]):
            if architecture == "source":
            architecture_id = database.get_architecture_id(architecture)
            if architecture_id == -1:
                warn("architecture '%s' not recognised." % (architecture))
            arch_ids_list.append(architecture_id)
        con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
        fubar("No valid architecture given.")

    return (con_suites, con_architectures, con_components, check_source)
926 ################################################################################
# Inspired(tm) by Bryn Keller's print_exc_plus (See
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
# NOTE(review): the def line of this extended-traceback dumper, the walk
# to the innermost frame and the try/except around printing each local
# are missing from this chunk.
    tb = sys.exc_info()[2]
    traceback.print_exc()
    print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
                                           frame.f_code.co_filename,
    for key, value in frame.f_locals.items():
        print "\t%20s = " % key,
        print "<unable to print>"
953 ################################################################################
def try_with_debug(function):
    """Call `function', dumping extended debug output on exception.
    NOTE(review): the entire body is missing from this chunk."""
963 ################################################################################
# Function for use in sorting lists of architectures.
# Sorts normally except that 'source' dominates all others.

def arch_compare_sw (a, b):
    # NOTE(review): every branch other than the both-"source" test is
    # missing from this chunk.
    if a == "source" and b == "source":
978 ################################################################################
# Split command line arguments which can be separated by either commas
# or whitespace. If dwim is set, it will complain about string ending
# in comma since this usually means someone did 'dak ls -a i386, m68k
# foo' or something and the inevitable confusion resulting from 'm68k'
# being treated as an argument is undesirable.

def split_args (s, dwim=1):
    """Split `s' on commas when it contains any, otherwise on
    whitespace; with dwim set, a trailing comma is reported as a fatal
    usage error via fubar().  Returns the list of arguments.

    The visible chunk had lost both return statements; restored here.
    """
    if s.find(",") == -1:
        return s.split()
    else:
        if s[-1:] == "," and dwim:
            fubar("split_args: found trailing comma, spurious space maybe?")
        return s.split(",")
994 ################################################################################
def Dict(**kwargs):
    """Return the given keyword arguments as a dictionary."""
    return kwargs
998 ########################################
# Our very own version of commands.getouputstatus(), hacked to support
# gpgv's status fd.
def gpgv_get_status_output(cmd, status_read, status_write):
    # Run `cmd' under /bin/sh, capturing stdout/stderr plus the gpgv
    # status fd; returns (output, status, exit_status).
    # NOTE(review): the fork(), the child's fd re-plumbing, the read
    # loop's bookkeeping (more_data, accumulation into output/status)
    # and several close() calls are missing from this chunk.
    cmd = ['/bin/sh', '-c', cmd]
    p2cread, p2cwrite = os.pipe()
    c2pread, c2pwrite = os.pipe()
    errout, errin = os.pipe()

    # (child branch) close every fd except the status fd, then exec.
    for i in range(3, 256):
        if i != status_write:
    os.execvp(cmd[0], cmd)

    # (parent branch) redirect the child's output pipes.
    os.dup2(c2pread, c2pwrite)
    os.dup2(errout, errin)

    output = status = ""
    i, o, e = select.select([c2pwrite, errin, status_read], [], [])
    r = os.read(fd, 8196)
    more_data.append(fd)
    if fd == c2pwrite or fd == errin:
    elif fd == status_read:
    fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))

    pid, exit_status = os.waitpid(pid, 0)

    os.close(status_write)
    os.close(status_read)

    return output, status, exit_status
1062 ################################################################################
def process_gpgv_output(status):
    # Process the status-fd output
    # Turn gpgv's "[GNUPG:] KEYWORD args..." lines into a dict keyed by
    # keyword; returns (keywords, internal_error) where internal_error
    # accumulates descriptions of malformed lines.
    # NOTE(review): the keywords/internal_error initialisations, the
    # blank-line skip, length guard, several "continue" statements and
    # the computation of `args' are missing from this chunk.
    for line in status.split('\n'):
        split = line.split()
        internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
        # Some status tokens may legitimately appear more than once.
        if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
        keywords[keyword] = args
    return (keywords, internal_error)
1089 ################################################################################
def retrieve_key (filename, keyserver=None, keyring=None):
    """Retrieve the key that signed 'filename' from 'keyserver' and
    add it to 'keyring'. Returns nothing on success, or an error message
    on failure."""
    # NOTE(review): the guards applying these defaults only when the
    # caller passed None, the internal_error check before its return,
    # the result check before the final return and the success return
    # are missing from this chunk.

    # Defaults for keyserver and keyring
    keyserver = Cnf["Dinstall::KeyServer"]
    keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(filename):
        return "%s: tainted filename" % (filename)

    # Invoke gpgv on the file
    status_read, status_write = os.pipe();
    cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
    (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)
    return internal_error

    if not keywords.has_key("NO_PUBKEY"):
        return "didn't find expected NO_PUBKEY in gpgv status-fd output"

    fingerprint = keywords["NO_PUBKEY"][0]
    # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
    # it'll try to create a lockfile in /dev. A better solution might
    # be a tempfile or something.
    cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
          % (Cnf["Dinstall::SigningKeyring"])
    cmd += " --keyring %s --keyserver %s --recv-key %s" \
           % (keyring, keyserver, fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    return "'%s' failed with exit code %s" % (cmd, result)
def gpg_keyring_args(keyrings=None):
    """Return a gpg command-line fragment selecting `keyrings' (a list
    of keyring paths).

    When no keyrings are supplied, fall back to the configured
    Dinstall::GPGKeyring list.  The visible chunk had lost this guard,
    unconditionally clobbering the caller's argument.
    """
    if not keyrings:
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
    return " ".join(["--keyring %s" % x for x in keyrings])
1141 ################################################################################
def check_signature (sig_filename, reject, data_filename="", keyrings=None, autofetch=None):
    """Check the signature of a file and return the fingerprint if the
    signature is valid or 'None' if it's not. The first argument is the
    filename whose signature should be checked. The second argument is a
    reject function and is called when an error is found. The reject()
    function must allow for two arguments: the first is the error message,
    the second is an optional prefix string. It's possible for reject()
    to be called more than once during an invocation of check_signature().
    The third argument is optional and is the name of the files the
    detached signature applies to. The fourth argument is optional and is
    a *list* of keyrings to use. 'autofetch' can either be None, True or
    False. If None, the default behaviour specified in the config will be
    used."""
    # NOTE(review): many lines are missing from this chunk — notably the
    # "return None" statements after the reject() calls, the guard
    # applying the keyring default only when keyrings is None, the
    # autofetch invocation guard, the bad_things/internal_error
    # bookkeeping and the final return of the fingerprint.  The visible
    # code is reproduced as-is.

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(sig_filename):
        reject("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))

    if data_filename and not re_taint_free.match(data_filename):
        reject("!!WARNING!! tainted data filename: '%s'." % (data_filename))

    keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    # Autofetch the signing key if that's enabled
    if autofetch == None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    error_msg = retrieve_key(sig_filename)

    # Build the command line
    status_read, status_write = os.pipe();
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    reject("internal error while performing signature check on %s." % (sig_filename))
    reject(internal_error, "")
    reject("Please report the above errors to the Archive maintainers by replying to this mail.", "")

    # Now check for obviously bad things in the processed output
    if keywords.has_key("KEYREVOKED"):
        reject("The key used to sign %s has been revoked." % (sig_filename))
    if keywords.has_key("BADSIG"):
        reject("bad signature on %s." % (sig_filename))
    if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
        reject("failed to check signature on %s." % (sig_filename))
    if keywords.has_key("NO_PUBKEY"):
        args = keywords["NO_PUBKEY"]
        reject("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if keywords.has_key("BADARMOR"):
        reject("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if keywords.has_key("NODATA"):
        reject("no signature found in %s." % (sig_filename))
    if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
        args = keywords["KEYEXPIRED"]
        reject("The key (0x%s) used to sign %s has expired." % (key, sig_filename))

    # Next check gpgv exited with a zero return code
    reject("gpgv failed while checking %s." % (sig_filename))
    reject(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
    reject(prefix_multi_line_string(output, " [GPG output:] "), "")

    # Sanity check the good stuff we expect
    if not keywords.has_key("VALIDSIG"):
        reject("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
    args = keywords["VALIDSIG"]
    reject("internal error while checking signature on %s." % (sig_filename))
    fingerprint = args[0]
    if not keywords.has_key("GOODSIG"):
        reject("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if not keywords.has_key("SIG_ID"):
        reject("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

    # Finally ensure there's not something we don't recognise
    known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
                          NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")

    for keyword in keywords.keys():
        if not known_keywords.has_key(keyword):
            reject("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
1271 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """Retrieve email addresses from gpg key uids for a given fingerprint."""
    # Serve repeat lookups from the module-level cache.
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses != None:
    # Cache miss: ask gpg to print the key; gpg_keyring_args() supplies the
    # configured --keyring options.
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
        % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    # Pull the address out of every 'uid ... <addr>' line of the output.
    for l in output.split('\n'):
        m = re_gpg_uid.match(l)
            addresses.add(m.group(1))
    # Remember the result for subsequent calls with the same fingerprint.
    key_uid_email_cache[fingerprint] = addresses
1290 ################################################################################
1292 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """Greedily word-wrap 'paragraph' into lines of at most 'max_length'
    characters, prefixing each continuation line with 'prefix'."""
    words = paragraph.split()
        word_size = len(word)
        # An over-long word cannot be wrapped; it gets a line of its own.
        if word_size > max_length:
            s += line + '\n' + prefix
            s += word + '\n' + prefix
            # The +1 accounts for the space that would join the word on.
            new_length = len(line) + word_size + 1
            # Word doesn't fit on the current line: flush the line first.
            if new_length > max_length:
                s += line + '\n' + prefix
1323 ################################################################################
1325 # Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
1326 # Returns fixed 'src'
def clean_symlink (src, dest, root):
    """Relativize the absolute symlink from 'src' -> 'dest' with respect
    to 'root'; returns the fixed (relative) 'src' path."""
    # Strip the leading root from both paths (first occurrence only).
    rel_src = src.replace(root, '', 1)
    link_dir = os.path.dirname(dest.replace(root, '', 1))
    # Climb one level for each directory component the link lives under.
    ups = '../' * (link_dir.count('/') + 1)
    return ups + rel_src
1334 ################################################################################
def temp_filename(directory=None, dotprefix=None, perms=0700):
    """Return a secure and unique filename by pre-creating it.
    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'dotprefix' is non-null, the filename will be prefixed with a '.'."""
        # Temporarily point the tempfile module at the requested directory.
        old_tempdir = tempfile.tempdir
        tempfile.tempdir = directory
    filename = tempfile.mktemp()
        # Turn the basename into a dotfile, keeping the directory part.
        filename = "%s/.%s" % (os.path.dirname(filename), os.path.basename(filename))
    # mktemp() only chose a name; O_CREAT|O_EXCL here does the actual secure,
    # race-free pre-creation with the requested permissions.
    fd = os.open(filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, perms)
    # Restore the module-global temp directory.
    tempfile.tempdir = old_tempdir
1357 ################################################################################
1359 # checks if the user part of the email is listed in the alias file
def is_email_alias(email):
    """Return whether the user part of 'email' is listed in the alias file."""
    # Lazily build the set of aliased usernames on first use.
    if alias_cache == None:
        aliasfn = which_alias_file()
            # Alias file lines look like 'name: target'; keep the name part.
            for l in open(aliasfn):
                alias_cache.add(l.split(':')[0])
    uid = email.split('@')[0]
    return uid in alias_cache
1372 ################################################################################
# Module initialisation: load the shipped default dak configuration, then
# overlay the site-specific config file when it differs from the default.
Cnf = apt_pkg.newConfiguration()
apt_pkg.ReadConfigFileISC(Cnf,default_config)
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1382 ################################################################################