2 # vim:set et ts=4 sw=4:
5 # Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
7 ################################################################################
9 # This program is free software; you can redistribute it and/or modify
10 # it under the terms of the GNU General Public License as published by
11 # the Free Software Foundation; either version 2 of the License, or
12 # (at your option) any later version.
14 # This program is distributed in the hope that it will be useful,
15 # but WITHOUT ANY WARRANTY; without even the implied warranty of
16 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 # GNU General Public License for more details.
19 # You should have received a copy of the GNU General Public License
20 # along with this program; if not, write to the Free Software
21 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
23 ################################################################################
25 import codecs, commands, email.Header, os, pwd, re, select, socket, shutil, \
26 sys, tempfile, traceback, stat
29 from dak_exceptions import *
31 ################################################################################
# Precompiled regular expressions used throughout this module.
re_comments = re.compile(r"\#.*")                      # '#' to end-of-line comments
re_no_epoch = re.compile(r"^\d+\:")                    # leading "epoch:" of a version
re_no_revision = re.compile(r"-[^-]+$")                # trailing "-revision" of a version
re_arch_from_filename = re.compile(r"/binary-[^/]+/")  # ".../binary-<arch>/..." path part
re_extract_src_version = re.compile (r"(\S+)\s*\((.*)\)")
re_isadeb = re.compile (r"(.+?)_(.+?)_(.+)\.u?deb$")
re_issource = re.compile (r"(.+)_(.+?)\.(orig\.tar\.gz|diff\.gz|tar\.gz|dsc)$")

# RFC822-style control fields: "Field: value" plus continuation lines
# (a continuation line starts with whitespace).
re_single_line_field = re.compile(r"^(\S*)\s*:\s*(.*)")
re_multi_line_field = re.compile(r"^\s(.*)")
re_taint_free = re.compile(r"^[-+~/\.\w]+$")           # filenames safe to hand to a shell

re_parse_maintainer = re.compile(r"^\s*(\S.*\S)\s*\<([^\>]+)\>")
re_gpg_uid = re.compile('^uid.*<([^>]*)>')

# "source (version)" as found in a Source: field.
# NOTE(review): this pattern was previously defined twice with an identical
# regex; the redundant duplicate definition has been removed.
re_srchasver = re.compile(r"^(\S+)\s+\((\S+)\)$")
re_verwithext = re.compile(r"^(\d+)(?:\.(\d+))(?:\s+\((\S+)\))?$")

default_config = "/etc/dak/dak.conf"
default_apt_config = "/etc/dak/apt.conf"
# Cache for GPG key uid/email lookups; populated lazily elsewhere in this
# module — presumably keyed by key fingerprint (TODO confirm against callers).
key_uid_email_cache = {}

# (hashname, function, earliest_changes_version)
# Checksums this module can verify; the version tuple records the first
# .changes Format version in which the Checksums-<hashname> field may appear.
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
                ("sha256", apt_pkg.sha256sum, (1, 8))]
63 ################################################################################
def open_file(filename, mode='r'):
    """Open `filename` in `mode`, raising CantOpenError on failure.

    NOTE(review): the try/except wrapper appears truncated in this view —
    as shown, the raise would be unconditional; presumably the raise sits
    in an except clause and the file object is returned on success.
    """
    f = open(filename, mode)
    raise CantOpenError, filename
72 ################################################################################
def our_raw_input(prompt=""):
    """Write `prompt` to stdout and read a line of user input; on EOF (^D)
    report a user interrupt on stderr.

    NOTE(review): the actual read and the surrounding exception handling
    are not visible in this view.
    """
    sys.stdout.write(prompt)
    sys.stderr.write("\nUser interrupt (^D).\n")
85 ################################################################################
def extract_component_from_section(section):
    """Split a Section value like "contrib/net" into (section, component).

    A '/'-qualified section yields its prefix as the component; otherwise a
    default component is presumably looked up via Cnf (TODO confirm — the
    initialisation and Cnf-assignment lines are truncated in this view).
    """
    if section.find('/') != -1:
        component = section.split('/')[0]
    # Expand default component
    if Cnf.has_key("Component::%s" % section):
    return (section, component)
102 ################################################################################
def parse_deb822(contents, signing_rules=0):
    """Parse deb822/RFC822-style `contents` into a field -> value dict.

    signing_rules: -1 = no signature required, 0 = signature required,
    1 = strict .dsc-style format checking (see parse_changes for details).
    Raises ParseChangesError or InvalidDscError on malformed input.

    NOTE(review): several initialisation and loop-control lines are not
    visible in this view; comments below describe only the visible logic.
    """
    # Split the lines in the input, keeping the linebreaks.
    lines = contents.splitlines(True)
    # Empty input is rejected outright (guard condition truncated here).
    raise ParseChangesError, "[Empty changes file]"
    # Reindex by line number so we can easily verify the format of
    # (any PGP armor framing); trailing newline is stripped per line.
    indexed_lines[index] = line[:-1]
    num_of_lines = len(indexed_lines.keys())
    while index < num_of_lines:
        line = indexed_lines[index]
        if signing_rules == 1:
            # Strict mode: the signed data must be followed immediately by
            # the PGP SIGNATURE block, otherwise the .dsc is invalid.
            if index > num_of_lines:
                raise InvalidDscError, index
            line = indexed_lines[index]
            if not line.startswith("-----BEGIN PGP SIGNATURE"):
                raise InvalidDscError, index
        if line.startswith("-----BEGIN PGP SIGNATURE"):
        if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
            if signing_rules == 1:
                # Skip PGP header data up to the first blank line.
                while index < num_of_lines and line != "":
                    line = indexed_lines[index]
        # If we're not inside the signed data, don't process anything
        if signing_rules >= 0 and not inside_signature:
        slf = re_single_line_field.match(line)
        # "Field: value" starts a new field.
        field = slf.groups()[0].lower()
        changes[field] = slf.groups()[1]
        # A lone "." continuation line encodes a blank line in the value.
        changes[field] += '\n'
        mlf = re_multi_line_field.match(line)
        # Continuation line with no preceding field is an error.
        raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
        if first == 1 and changes[field] != "":
            changes[field] += '\n'
        changes[field] += mlf.groups()[0] + '\n'
    if signing_rules == 1 and inside_signature:
        # Strict mode: signature block must have been closed.
        raise InvalidDscError, index
    # Preserve the raw input for later re-parsing (see ensure_hashes).
    changes["filecontents"] = "".join(lines)
    if changes.has_key("source"):
        # Strip the source version in brackets from the source field,
        # put it in the "source-version" field instead.
        srcver = re_srchasver.search(changes["source"])
        changes["source"] = srcver.group(1)
        changes["source-version"] = srcver.group(2)
    # Any accumulated error is fatal (error assembly truncated in this view).
    raise ParseChangesError, error
192 ################################################################################
def parse_changes(filename, signing_rules=0):
    """Parses a changes file and returns a dictionary where each field is a
    key.  The mandatory first argument is the filename of the .changes
    file.

    signing_rules is an optional argument:

    o If signing_rules == -1, no signature is required.
    o If signing_rules == 0 (the default), a signature is required.
    o If signing_rules == 1, it turns on the same strict format checking
      as used for .dsc files.

    The rules for (signing_rules == 1)-mode are:

    o The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
      followed by any PGP header data and must end with a blank line.

    o The data section must end with a blank line and must be followed by
      "-----BEGIN PGP SIGNATURE-----".
    """
    # Delegates the actual parsing to parse_deb822 on the file's contents.
    changes_in = open_file(filename)
    content = changes_in.read()
    return parse_deb822(content, signing_rules)
220 ################################################################################
def hash_key(hashname):
    """Return the files-dict key under which the given hash is stored,
    e.g. "sha1" -> "sha1sum"."""
    return hashname + 'sum'
225 ################################################################################
def create_hash(where, files, hashname, hashfunc):
    """create_hash extends the passed files dict with the given hash by
    iterating over all files on disk and passing them to the hashing
    function given.

    NOTE(review): the `try:` opener, rejmsg initialisation and the final
    return are truncated in this view; presumably rejmsg (a list of reject
    messages) is returned.
    """
    for f in files.keys():
        # Open each file and store hashfunc's digest under e.g. "sha1sum".
        file_handle = open_file(f)
        except CantOpenError:
            rejmsg.append("Could not open file %s for checksumming" % (f))
        files[f][hash_key(hashname)] = hashfunc(file_handle)
244 ################################################################################
def check_hash(where, files, hashname, hashfunc):
    """check_hash checks the given hash in the files dict against the actual
    files on disk.  The hash values need to be present consistently in
    all file entries.  It does not modify its input in any way.

    NOTE(review): rejmsg initialisation, the `try:` opener and the return
    are truncated in this view.
    """
    for f in files.keys():
        file_handle = open_file(f)
        # Check for the hash entry, to not trigger a KeyError.
        if not files[f].has_key(hash_key(hashname)):
            rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
        # Actually check the hash for correctness.
        if hashfunc(file_handle) != files[f][hash_key(hashname)]:
            rejmsg.append("%s: %s check failed in %s" % (f, hashname,
        except CantOpenError:
            # TODO: This happens when the file is in the pool.
            warn("Cannot open file %s" % f)
276 ################################################################################
def check_size(where, files):
    """check_size checks the file sizes in the passed files dict against the
    sizes of the actual files on disk (via os.stat).

    NOTE(review): the stat call, its exception handling and the return of
    the reject-message list are truncated in this view.
    """
    for f in files.keys():
        # TODO: This happens when the file is in the pool.
        actual_size = entry[stat.ST_SIZE]
        size = int(files[f]["size"])
        if size != actual_size:
            rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
                          % (f, actual_size, size, where))
299 ################################################################################
def check_hash_fields(what, manifest):
    """check_hash_fields ensures that there are no checksum fields in the
    given dict that we do not know about.

    NOTE(review): rejmsg initialisation and return are truncated here;
    presumably a list of reject messages is returned.
    """
    # Names of the hashes we support (first element of each known_hashes tuple).
    hashes = map(lambda x: x[0], known_hashes)
    for field in manifest:
        if field.startswith("checksums-"):
            hashname = field.split("-",1)[1]
            if hashname not in hashes:
                rejmsg.append("Unsupported checksum field for %s "\
                    "in %s" % (hashname, what))
315 ################################################################################
def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
    """Ensure `files` carries the given hash for every entry of a .changes.

    If the .changes `format` is new enough to carry Checksums-* fields
    (>= `version`), import them from `changes`; otherwise fall back to
    computing the hash ourselves.  Returns a list of reject messages.
    """
    if format >= version:
        # The version should contain the specified hash.
        # Import hashes from the changes
        rejmsg = parse_checksums(".changes", files, changes, hashname)
    # We need to calculate the hash because it can't possibly
    # be in the changes file at this format version.
    return func(".changes", files, hashname, hashfunc)
# We could add the orig which might be in the pool to the files dict to
# access the checksums easily.

def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
    """ensure_dsc_hashes' task is to ensure that each and every *present* hash
    in the dsc is correct, i.e. identical to the changes file and if necessary
    the pool.  The latter task is delegated to check_hash.

    NOTE(review): rejmsg initialisation and return are truncated here.
    """
    # Nothing to do if the .dsc doesn't carry this checksum field at all.
    if not dsc.has_key('Checksums-%s' % (hashname,)):
    # Import hashes from the dsc
    parse_checksums(".dsc", dsc_files, dsc, hashname)
    rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
349 ################################################################################
def ensure_hashes(changes, dsc, files, dsc_files):
    """Validate/compute all known hashes for a .changes and its .dsc.

    NOTE(review): the try/except around the format parsing and the final
    return of the collected reject messages are truncated in this view.
    """
    # Make sure we recognise the format of the Files: field in the .changes
    format = changes.get("format", "0.0").split(".", 1)
    format = int(format[0]), int(format[1])
    # Fallback when the minor component is absent/unparseable.
    format = int(float(format[0])), 0
    # We need to deal with the original changes blob, as the fields we need
    # might not be in the changes dict serialised into the .dak anymore.
    orig_changes = parse_deb822(changes['filecontents'])
    # Copy the checksums over to the current changes dict. This will keep
    # the existing modifications to it intact.
    for field in orig_changes:
        if field.startswith('checksums-'):
            changes[field] = orig_changes[field]
    # Check for unsupported hashes
    rejmsg.extend(check_hash_fields(".changes", changes))
    rejmsg.extend(check_hash_fields(".dsc", dsc))
    # We have to calculate the hash if we have an earlier changes version than
    # the hash appears in rather than require it exist in the changes file
    for hashname, hashfunc, version in known_hashes:
        rejmsg.extend(_ensure_changes_hash(changes, format, version, files,
        # Source uploads additionally have their .dsc files checked.
        if "source" in changes["architecture"]:
            rejmsg.extend(_ensure_dsc_hash(dsc, dsc_files, hashname,
def parse_checksums(where, files, manifest, hashname):
    """Parse the Checksums-<hashname> field of `manifest` and merge the
    hashes into `files` (a dict keyed by filename whose entries carry at
    least a "size").  Returns a list of reject messages; empty means OK.

    where: label used in messages (e.g. ".changes" or ".dsc").
    """
    rejmsg = []
    field = 'checksums-%s' % hashname
    if not field in manifest:
        return rejmsg
    input = manifest[field]
    # Each line is "<hash> <size> <filename>".
    for line in input.split('\n'):
        if not line:
            break
        hash, size, file = line.strip().split(' ')
        if not files.has_key(file):
            rejmsg.append("%s: not present in files but in checksums-%s in %s" %
                (file, hashname, where))
        if not files[file]["size"] == size:
            rejmsg.append("%s: size differs for files and checksums-%s entry "\
                "in %s" % (file, hashname, where))
        files[file][hash_key(hashname)] = hash
    # Every file must also have an entry in the checksums field.
    for f in files.keys():
        if not files[f].has_key(hash_key(hashname)):
            # Bug fix: the message previously interpolated the stale loop
            # variable `file` instead of the offending filename `f`.
            rejmsg.append("%s: no entry in checksums-%s in %s" % (f,
                hashname, where))
    return rejmsg
409 ################################################################################
# Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl

def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
    """Parse the Files: (or Checksums-*) field of `changes` into a dict
    mapping filename -> {size, section, priority, component, <hashname>}.

    NOTE(review): the files-dict initialisation, the per-line split and
    the final return are truncated in this view.
    """
    # Make sure we have a Files: field to parse...
    if not changes.has_key(field):
        raise NoFilesFieldError
    # Make sure we recognise the format of the Files: field
    format = re_verwithext.search(changes.get("format", "0.0"))
    raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
    format = format.groups()
    if format[1] == None:
        # No minor version captured: treat e.g. "1" as (1, 0).
        format = int(float(format[0])), 0, format[2]
        format = int(format[0]), int(format[1]), format[2]
    if format[2] == None:
    raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
    # Only Format versions 1.5 through 1.8 are accepted.
    if (format < (1,5) or format > (1,8)):
        raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
    # Checksums-* fields only exist from Format 1.8 onwards.
    if field != "files" and format < (1,8):
        raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
    # .changes Files: lines carry section/priority; .dsc lines do not.
    includes_section = (not is_a_dsc) and field == "files"
    # Parse each entry/line:
    for i in changes[field].split('\n'):
        section = priority = ""
        (md5, size, section, priority, name) = s
        (md5, size, name) = s
        raise ParseChangesError, i
        (section, component) = extract_component_from_section(section)
        files[name] = Dict(size=size, section=section,
                           priority=priority, component=component)
        files[name][hashname] = md5
471 ################################################################################
def force_to_utf8(s):
    """Forces a string to UTF-8. If the string isn't already UTF-8,
    it's assumed to be ISO-8859-1."""
    # NOTE(review): the UTF-8 validation branch is truncated in this view;
    # the visible fallback decodes as latin-1 and re-encodes as UTF-8.
    latin1_s = unicode(s,'iso8859-1')
    return latin1_s.encode('utf-8')
def rfc2047_encode(s):
    """Encodes a (header) string per RFC2047 if necessary. If the
    string is neither ASCII nor UTF-8, it's assumed to be ISO-8859-1.

    NOTE(review): the try/except ladder around the codec probes is
    truncated in this view.
    """
    # Probe: if this encodes cleanly, s is plain ASCII.
    codecs.lookup('ascii')[1](s)
    # Probe: if this encodes cleanly, treat s as UTF-8...
    codecs.lookup('utf-8')[1](s)
    h = email.Header.Header(s, 'utf-8', 998)
    # ...otherwise fall back to ISO-8859-1.
    h = email.Header.Header(s, 'iso-8859-1', 998)
499 ################################################################################
# <Culus> 'The standard sucks, but my tool is supposed to interoperate
#          with it. I know - I'll fix the suckage and make things
#          incompatible!'

def fix_maintainer (maintainer):
    """Parses a Maintainer or Changed-By field and returns:
    (1) an RFC822 compatible version,
    (2) an RFC2047 compatible version,
    (3) the name, (4) the email.

    The name is forced to UTF-8 for both (1) and (3).  If the name field
    contains '.' or ',' (as allowed by Debian policy), (1) and (2) are
    switched to 'email (name)' format.

    Raises ParseMaintError on unparseable input or a bad email part.
    """
    maintainer = maintainer.strip()
    # Empty input: return four empty strings.
    return ('', '', '', '')

    if maintainer.find("<") == -1:
    elif (maintainer[0] == "<" and maintainer[-1:] == ">"):
        # Bare "<email>" with no name part.
        email = maintainer[1:-1]
    m = re_parse_maintainer.match(maintainer)
    raise ParseMaintError, "Doesn't parse as a valid Maintainer field."

    # Get an RFC2047 compliant version of the name
    rfc2047_name = rfc2047_encode(name)

    # Force the name to be UTF-8
    name = force_to_utf8(name)

    if name.find(',') != -1 or name.find('.') != -1:
        rfc822_maint = "%s (%s)" % (email, name)
        rfc2047_maint = "%s (%s)" % (email, rfc2047_name)
    # Normal "Name <email>" form otherwise.
    rfc822_maint = "%s <%s>" % (name, email)
    rfc2047_maint = "%s <%s>" % (rfc2047_name, email)

    # buildd_* pseudo-addresses are exempt from the '@' sanity check.
    if email.find("@") == -1 and email.find("buildd_") != 0:
        raise ParseMaintError, "No @ found in email address part."

    return (rfc822_maint, rfc2047_maint, name, email)
550 ################################################################################
# sendmail wrapper, takes _either_ a message string or a file as arguments
def send_mail (message, filename=""):
    """Pipe `message` (or the contents of `filename`) through the
    configured sendmail command.  Raises SendmailFailedError on a
    non-zero exit.

    NOTE(review): the branch conditions and the temp-file cleanup are
    truncated in this view.
    """
    # If we've been passed a string dump it into a temporary file
    # (mktemp's race is mitigated by the O_EXCL open below).
    filename = tempfile.mktemp()
    fd = os.open(filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0700)
    os.write (fd, message)
    # Invoke sendmail with the file on stdin.
    (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
    raise SendmailFailedError, output
    # Clean up any temporary files
570 ################################################################################
def poolify (source, component):
    """Return the pool sub-path for `source` within `component`:
    "lib*" sources pool under their first four characters, everything
    else under the first character (Debian pool layout).

    NOTE(review): a component-normalisation line appears truncated here —
    as visible, `component` is concatenated without a trailing '/'.
    """
    if source[:3] == "lib":
        return component + source[:4] + '/' + source + '/'
    return component + source[:1] + '/' + source + '/'
580 ################################################################################
def move (src, dest, overwrite = 0, perms = 0664):
    """Move `src` to `dest` (copy2 + chmod; the unlink of `src` is not
    visible in this view), creating the destination directory with mode
    02775 if needed.  Refuses to clobber an existing file unless
    `overwrite` is set.
    """
    if os.path.exists(dest) and os.path.isdir(dest):
    dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        # Create missing parents with a clean umask so 02775 sticks.
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Moving %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        # Moving into a directory: keep the source's basename.
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
        fubar("Can't move %s to %s - file already exists." % (src, dest))
        if not os.access(dest, os.W_OK):
            fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
def copy (src, dest, overwrite = 0, perms = 0664):
    """Copy `src` to `dest` (copy2 + chmod), creating the destination
    directory with mode 02775 if needed.  Unlike move(), clobber attempts
    raise FileExistsError / CantOverwriteError rather than calling fubar.
    """
    if os.path.exists(dest) and os.path.isdir(dest):
    dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        # Create missing parents with a clean umask so 02775 sticks.
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Copying %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        # Copying into a directory: keep the source's basename.
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
        raise FileExistsError
        if not os.access(dest, os.W_OK):
            raise CantOverwriteError
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
627 ################################################################################
630 res = socket.gethostbyaddr(socket.gethostname())
631 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
632 if database_hostname:
633 return database_hostname
def which_conf_file ():
    """Return the dak config path for this host: the per-host
    Config::<hostname>::DakConfig setting if present, else the
    module-level default_config."""
    res = socket.gethostbyaddr(socket.gethostname())
    if Cnf.get("Config::" + res[0] + "::DakConfig"):
        return Cnf["Config::" + res[0] + "::DakConfig"]
    return default_config
def which_apt_conf_file ():
    """Return the apt config path for this host: the per-host
    Config::<hostname>::AptConfig setting if present, else the
    module-level default_apt_config."""
    res = socket.gethostbyaddr(socket.gethostname())
    if Cnf.get("Config::" + res[0] + "::AptConfig"):
        return Cnf["Config::" + res[0] + "::AptConfig"]
    return default_apt_config
def which_alias_file():
    """Return the per-host forward-alias file path if it exists.
    NOTE(review): the return statements are truncated in this view —
    presumably the path on success, None otherwise."""
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
    if os.path.exists(aliasfn):
659 ################################################################################
661 # Escape characters which have meaning to SQL's regex comparison operator ('~')
662 # (woefully incomplete)
665 s = s.replace('+', '\\\\+')
666 s = s.replace('.', '\\\\.')
669 ################################################################################
# Perform a substitution of template
def TemplateSubst(map, filename):
    """Read the template at `filename` and replace each key of `map`
    with its value.  NOTE(review): the loop header over map's keys and
    the return of the substituted template are truncated in this view.
    (`map` and `file` shadow builtins — kept as-is for compatibility.)"""
    file = open_file(filename)
    template = file.read()
    template = template.replace(x,map[x])
680 ################################################################################
def fubar(msg, exit_code=1):
    """Print a fatal error to stderr; presumably exits with `exit_code`
    (the sys.exit call is truncated in this view — TODO confirm)."""
    sys.stderr.write("E: %s\n" % (msg))
687 sys.stderr.write("W: %s\n" % (msg))
689 ################################################################################
691 # Returns the user name with a laughable attempt at rfc822 conformancy
692 # (read: removing stray periods).
694 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
696 ################################################################################
706 return ("%d%s" % (c, t))
708 ################################################################################
def cc_fix_changes (changes):
    """Rewrite changes["architecture"] from a whitespace string into a
    dict of arch -> 1, for cheap membership tests by changes_compare.
    NOTE(review): the split of `o` into arch tokens is truncated here."""
    o = changes.get("architecture", "")
    # Replace the string form with a dict keyed by architecture name.
    del changes["architecture"]
    changes["architecture"] = {}
    changes["architecture"][j] = 1
# Sort by source name, source version, 'have source', and then by filename
def changes_compare (a, b):
    """cmp()-style comparator for two .changes filenames, for sorting an
    upload queue.  Parse failures presumably fall back to filename order
    (the except branches are truncated in this view)."""
    a_changes = parse_changes(a)
    b_changes = parse_changes(b)

    # Normalise the architecture fields into dicts for the checks below.
    cc_fix_changes (a_changes)
    cc_fix_changes (b_changes)

    # Sort by source name
    a_source = a_changes.get("source")
    b_source = b_changes.get("source")
    q = cmp (a_source, b_source)

    # Sort by source version
    a_version = a_changes.get("version", "0")
    b_version = b_changes.get("version", "0")
    q = apt_pkg.VersionCompare(a_version, b_version)

    # Sort by 'have source'
    a_has_source = a_changes["architecture"].get("source")
    b_has_source = b_changes["architecture"].get("source")
    if a_has_source and not b_has_source:
    elif b_has_source and not a_has_source:

    # Fall back to sort by filename
758 ################################################################################
def find_next_free (dest, too_many=100):
    """Find a free filename by appending ".<n>" to `dest` until a
    non-existent path is found; raise NoFreeFilenameError after
    `too_many` attempts.  NOTE(review): the counter initialisation and
    the return of `dest` are truncated in this view."""
    while os.path.exists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
    if extra >= too_many:
        raise NoFreeFilenameError
770 ################################################################################
def result_join (original, sep = '\t'):
    """Join the items of `original` with `sep`, substituting something
    for None entries (the None branch body is truncated in this view).
    (`list` shadows the builtin — kept as-is for compatibility.)"""
    for i in xrange(len(original)):
        if original[i] == None:
        list.append(original[i])
    return sep.join(list)
781 ################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    """Prepend `prefix` to every line of `str`; blank lines are dropped
    unless include_blank_lines is set.  NOTE(review): the accumulator
    initialisation and return are truncated in this view."""
    for line in str.split('\n'):
        # Leading whitespace is presumably stripped per line (line-strip
        # statement truncated here — TODO confirm).
        if line or include_blank_lines:
            out += "%s%s\n" % (prefix, line)
    # Strip trailing new line
794 ################################################################################
def validate_changes_file_arg(filename, require_changes=1):
    """'filename' is either a .changes or .dak file.  If 'filename' is a
    .dak file, it's changed to be the corresponding .changes file.  The
    function then checks if the .changes file a) exists and b) is
    readable and returns the .changes filename if so.  If there's a
    problem, the next action depends on the option 'require_changes':

    o If 'require_changes' == -1, errors are ignored and the .changes
      filename is returned.
    o If 'require_changes' == 0, a warning is given and 'None' is returned.
    o If 'require_changes' == 1, a fatal error is raised.
    """
    orig_filename = filename
    if filename.endswith(".dak"):
        filename = filename[:-4]+".changes"

    if not filename.endswith(".changes"):
        error = "invalid file type; not a changes file"
    if not os.access(filename,os.R_OK):
        if os.path.exists(filename):
            error = "permission denied"
        error = "file not found"
    if require_changes == 1:
        fubar("%s: %s." % (orig_filename, error))
    elif require_changes == 0:
        warn("Skipping %s - %s" % (orig_filename, error))
    else: # We only care about the .dak file
835 ################################################################################
838 return (arch != "source" and arch != "all")
840 ################################################################################
def join_with_commas_and(list):
    """Render a list as English prose: [] -> "nothing", [x] -> x,
    [a, b, c] -> "a, b and c"."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    return "%s and %s" % (", ".join(list[:-1]), list[-1])
847 ################################################################################
852 (pkg, version, constraint) = atom
854 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
857 pp_deps.append(pp_dep)
858 return " |".join(pp_deps)
860 ################################################################################
865 ################################################################################
# Handle -a, -c and -s arguments; returns them as SQL constraints
def parse_args(Options):
    """Translate the Suite/Component/Architecture options into SQL
    "AND ... IN (...)" constraint fragments.  Returns
    (con_suites, con_architectures, con_components, check_source).
    Unknown names produce a warning; if none are valid, fubar aborts.
    NOTE(review): several initialisation lines (suite_ids_list,
    check_source, con_* defaults) are truncated in this view."""
    # Process suite
    for suite in split_args(Options["Suite"]):
        suite_id = database.get_suite_id(suite)
        # -1 presumably marks an unknown suite (TODO confirm helper contract).
        warn("suite '%s' not recognised." % (suite))
        suite_ids_list.append(suite_id)
    con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
    fubar("No valid suite given.")

    # Process component
    if Options["Component"]:
        component_ids_list = []
        for component in split_args(Options["Component"]):
            component_id = database.get_component_id(component)
            if component_id == -1:
                warn("component '%s' not recognised." % (component))
            component_ids_list.append(component_id)
        if component_ids_list:
            con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
        fubar("No valid component given.")

    # Process architecture
    con_architectures = ""
    if Options["Architecture"]:
        for architecture in split_args(Options["Architecture"]):
            if architecture == "source":
            # Non-"source" architectures are resolved to DB ids.
            architecture_id = database.get_architecture_id(architecture)
            if architecture_id == -1:
                warn("architecture '%s' not recognised." % (architecture))
            arch_ids_list.append(architecture_id)
        con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
        fubar("No valid architecture given.")

    return (con_suites, con_architectures, con_components, check_source)
925 ################################################################################
927 # Inspired(tm) by Bryn Keller's print_exc_plus (See
928 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
931 tb = sys.exc_info()[2]
940 traceback.print_exc()
942 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
943 frame.f_code.co_filename,
945 for key, value in frame.f_locals.items():
946 print "\t%20s = " % key,
950 print "<unable to print>"
952 ################################################################################
954 def try_with_debug(function):
962 ################################################################################
# Function for use in sorting lists of architectures.
# Sorts normally except that 'source' dominates all others.

def arch_compare_sw (a, b):
    """cmp()-style comparator; 'source' sorts before any other
    architecture (remaining branches truncated in this view)."""
    if a == "source" and b == "source":
977 ################################################################################
# Split command line arguments which can be separated by either commas
# or whitespace.  If dwim is set, it will complain about string ending
# in comma since this usually means someone did 'dak ls -a i386, m68k
# foo' or something and the inevitable confusion resulting from 'm68k'
# being treated as an argument is undesirable.

def split_args (s, dwim=1):
    """Split `s` on commas (or whitespace when no comma is present).
    NOTE(review): the actual split/return statements are truncated in
    this view."""
    if s.find(",") == -1:
    if s[-1:] == "," and dwim:
        fubar("split_args: found trailing comma, spurious space maybe?")
993 ################################################################################
995 def Dict(**dict): return dict
997 ########################################
# Our very own version of commands.getouputstatus(), hacked to support
# gpgv's status fd.

def gpgv_get_status_output(cmd, status_read, status_write):
    """Run `cmd` under /bin/sh with the given status pipe fds kept open
    for gpgv's --status-fd, returning (output, status, exit_status).

    NOTE(review): the fork, several dup2/close calls and the read loop's
    termination handling are truncated in this view.
    """
    cmd = ['/bin/sh', '-c', cmd]
    p2cread, p2cwrite = os.pipe()
    c2pread, c2pwrite = os.pipe()
    errout, errin = os.pipe()
    # Child: close every fd except the status pipe before exec'ing.
    for i in range(3, 256):
        if i != status_write:
    os.execvp(cmd[0], cmd)
    # Parent side: route child stdout/stderr into our read ends.
    os.dup2(c2pread, c2pwrite)
    os.dup2(errout, errin)

    output = status = ""
    # Multiplex over stdout, stderr and the gpgv status fd.
    i, o, e = select.select([c2pwrite, errin, status_read], [], [])
    r = os.read(fd, 8196)
    more_data.append(fd)
    if fd == c2pwrite or fd == errin:
    elif fd == status_read:
    # Any other ready fd is a programming error.
    fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))

    pid, exit_status = os.waitpid(pid, 0)
    os.close(status_write)
    os.close(status_read)
    return output, status, exit_status
1061 ################################################################################
def process_gpgv_output(status):
    """Parse gpgv --status-fd output into (keywords, internal_error).

    keywords maps each "[GNUPG:] <KEYWORD> <args...>" line to its args;
    internal_error collects descriptions of malformed lines ("" if none).
    NOTE(review): the dict/accumulator initialisation and the args slice
    are truncated in this view.
    """
    # Process the status-fd output
    for line in status.split('\n'):
        split = line.split()
        # Each status line must have at least the prefix and a keyword.
        internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
        # Some tokens may legitimately repeat; anything else repeating is an error.
        if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
        keywords[keyword] = args
    return (keywords, internal_error)
1088 ################################################################################
def retrieve_key (filename, keyserver=None, keyring=None):
    """Retrieve the key that signed 'filename' from 'keyserver' and
    add it to 'keyring'.  Returns nothing on success, or an error message
    string on failure.

    NOTE(review): the default-argument guards and the success/error
    return statements are partially truncated in this view.
    """
    # Defaults for keyserver and keyring
    keyserver = Cnf["Dinstall::KeyServer"]
    keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(filename):
        return "%s: tainted filename" % (filename)

    # Invoke gpgv on the file
    status_read, status_write = os.pipe();
    cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
    (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)
    return internal_error

    # Against an empty keyring, a signed file must yield NO_PUBKEY.
    if not keywords.has_key("NO_PUBKEY"):
        return "didn't find expected NO_PUBKEY in gpgv status-fd output"

    fingerprint = keywords["NO_PUBKEY"][0]
    # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
    # it'll try to create a lockfile in /dev. A better solution might
    # be a tempfile or something.
    cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
          % (Cnf["Dinstall::SigningKeyring"])
    cmd += " --keyring %s --keyserver %s --recv-key %s" \
           % (keyring, keyserver, fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    return "'%s' failed with exit code %s" % (cmd, result)
1132 ################################################################################
def gpg_keyring_args(keyrings=None):
    """Return "--keyring <path>" gpg arguments, one per keyring.
    Defaults to the configured Dinstall::GPGKeyring list (the
    keyrings-is-None guard is truncated in this view)."""
    keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
    return " ".join(["--keyring %s" % x for x in keyrings])
1140 ################################################################################
1142 def check_signature (sig_filename, reject, data_filename="", keyrings=None, autofetch=None):
1143 """Check the signature of a file and return the fingerprint if the
1144 signature is valid or 'None' if it's not. The first argument is the
1145 filename whose signature should be checked. The second argument is a
1146 reject function and is called when an error is found. The reject()
1147 function must allow for two arguments: the first is the error message,
1148 the second is an optional prefix string. It's possible for reject()
1149 to be called more than once during an invocation of check_signature().
1150 The third argument is optional and is the name of the files the
1151 detached signature applies to. The fourth argument is optional and is
1152 a *list* of keyrings to use. 'autofetch' can either be None, True or
1153 False. If None, the default behaviour specified in the config will be
1156 # Ensure the filename contains no shell meta-characters or other badness
1157 if not re_taint_free.match(sig_filename):
1158 reject("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
1161 if data_filename and not re_taint_free.match(data_filename):
1162 reject("!!WARNING!! tainted data filename: '%s'." % (data_filename))
1166 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1168 # Autofetch the signing key if that's enabled
1169 if autofetch == None:
1170 autofetch = Cnf.get("Dinstall::KeyAutoFetch")
1172 error_msg = retrieve_key(sig_filename)
1177 # Build the command line
1178 status_read, status_write = os.pipe();
1179 cmd = "gpgv --status-fd %s %s %s %s" % (
1180 status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)
1182 # Invoke gpgv on the file
1183 (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)
1185 # Process the status-fd output
1186 (keywords, internal_error) = process_gpgv_output(status)
1188 # If we failed to parse the status-fd output, let's just whine and bail now
1190 reject("internal error while performing signature check on %s." % (sig_filename))
1191 reject(internal_error, "")
1192 reject("Please report the above errors to the Archive maintainers by replying to this mail.", "")
1196 # Now check for obviously bad things in the processed output
1197 if keywords.has_key("KEYREVOKED"):
1198 reject("The key used to sign %s has been revoked." % (sig_filename))
1200 if keywords.has_key("BADSIG"):
1201 reject("bad signature on %s." % (sig_filename))
1203 if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
1204 reject("failed to check signature on %s." % (sig_filename))
1206 if keywords.has_key("NO_PUBKEY"):
1207 args = keywords["NO_PUBKEY"]
1210 reject("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
1212 if keywords.has_key("BADARMOR"):
1213 reject("ASCII armour of signature was corrupt in %s." % (sig_filename))
1215 if keywords.has_key("NODATA"):
1216 reject("no signature found in %s." % (sig_filename))
1218 if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
1219 args = keywords["KEYEXPIRED"]
1222 reject("The key (0x%s) used to sign %s has expired." % (key, sig_filename))
1228 # Next check gpgv exited with a zero return code
1230 reject("gpgv failed while checking %s." % (sig_filename))
1232 reject(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
1234 reject(prefix_multi_line_string(output, " [GPG output:] "), "")
1237 # Sanity check the good stuff we expect
1238 if not keywords.has_key("VALIDSIG"):
1239 reject("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
1242 args = keywords["VALIDSIG"]
1244 reject("internal error while checking signature on %s." % (sig_filename))
1247 fingerprint = args[0]
1248 if not keywords.has_key("GOODSIG"):
1249 reject("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
1251 if not keywords.has_key("SIG_ID"):
1252 reject("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))
1255 # Finally ensure there's not something we don't recognise
1256 known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
1257 SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
1258 NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")
1260 for keyword in keywords.keys():
1261 if not known_keywords.has_key(keyword):
1262 reject("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
1270 ################################################################################
1272 def gpg_get_key_addresses(fingerprint):
1273 """retrieve email addresses from gpg key uids for a given fingerprint"""
# NOTE(review): this view of the function is elided -- the cache-hit
# return, the initialisation of the 'addresses' set and the final
# return statement are not visible here.
# Fast path: reuse the result of a previous lookup for this fingerprint.
1274 addresses = key_uid_email_cache.get(fingerprint)
1275 if addresses != None:
# Ask gpg to print the key details; "uid" lines carry "Name <email>"
# entries.  gpg_keyring_args() presumably expands to --keyring options
# (defined elsewhere in this file) -- TODO confirm.
1278 cmd = "gpg --no-default-keyring %s --fingerprint %s" \
1279 % (gpg_keyring_args(), fingerprint)
# commands.getstatusoutput runs the command through a shell (Python 2
# stdlib module, removed in Python 3).
1280 (result, output) = commands.getstatusoutput(cmd)
1282 for l in output.split('\n'):
# re_gpg_uid (defined at the top of the file) captures the text between
# '<' and '>' on lines starting with "uid".
1283 m = re_gpg_uid.match(l)
1285 addresses.add(m.group(1))
# Memoize the (possibly empty) result for subsequent calls.
1286 key_uid_email_cache[fingerprint] = addresses
1289 ################################################################################
1291 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
# Word-wrap 'paragraph' to at most 'max_length' columns, prepending
# 'prefix' to each continuation line.
# NOTE(review): heavily elided in this view -- the word loop header,
# the initialisation of 's'/'line', the else branches and the final
# return are not visible; the comments below cover only visible lines.
1293 def wrap(paragraph, max_length, prefix=""):
# Split on arbitrary whitespace; the original spacing is not preserved.
1297 words = paragraph.split()
1300 word_size = len(word)
# A word longer than the limit is flushed onto a line of its own rather
# than being broken in the middle.
1301 if word_size > max_length:
1303 s += line + '\n' + prefix
1304 s += word + '\n' + prefix
# The +1 accounts for the separating space if the word joins this line.
1307 new_length = len(line) + word_size + 1
1308 if new_length > max_length:
# The current line would overflow: emit it and start a fresh one.
1309 s += line + '\n' + prefix
1322 ################################################################################
1324 # Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
1325 # Returns fixed 'src'
def clean_symlink (src, dest, root):
    """Return 'src' rewritten as a path relative to the directory that
    holds 'dest', after stripping the common prefix 'root' from both
    absolute paths."""
    # Drop the first occurrence of the root prefix from each path.
    stripped_src = src.replace(root, '', 1)
    stripped_dest = dest.replace(root, '', 1)
    # One '../' step for every component of the directory containing
    # the (root-stripped) destination.
    dest_dir = os.path.dirname(stripped_dest)
    ups = len(dest_dir.split('/'))
    return ('../' * ups) + stripped_src
1333 ################################################################################
1335 def temp_filename(directory=None, dotprefix=None, perms=0700):
1336 """Return a secure and unique filename by pre-creating it.
1337 If 'directory' is non-null, it will be the directory the file is pre-created in.
1338 If 'dotprefix' is non-null, the filename will be prefixed with a '.'."""
# NOTE(review): elided view -- the 'if directory:' / 'if dotprefix:'
# guards, the os.close() of the descriptor and the final return are
# not visible here.
# Temporarily point the tempfile module at the requested directory;
# this mutates module-global state (not thread-safe) and is restored
# below.
1341 old_tempdir = tempfile.tempdir
1342 tempfile.tempdir = directory
1344 filename = tempfile.mktemp()
# Turn ".../name" into ".../.name" for the dot-prefixed variant.
1347 filename = "%s/.%s" % (os.path.dirname(filename), os.path.basename(filename))
# mktemp() alone is racy; the O_CREAT|O_EXCL open makes the creation
# atomic, so the name is safe to hand back once this open succeeds.
1348 fd = os.open(filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, perms)
# Restore the module-global temp directory.
1352 tempfile.tempdir = old_tempdir
1356 ################################################################################
1358 # checks if the user part of the email is listed in the alias file
# NOTE(review): elided view -- the 'global alias_cache' statement and
# the initialisation of the set are not visible here.
1360 def is_email_alias(email):
# Build the cache lazily on first use; which_alias_file() is defined
# elsewhere in this file.
1362 if alias_cache == None:
1363 aliasfn = which_alias_file()
# Alias files use "name: target" lines; the alias name is everything
# before the first colon.
1366 for l in open(aliasfn):
1367 alias_cache.add(l.split(':')[0])
# Only the local part (before '@') of the address is compared.
1368 uid = email.split('@')[0]
1369 return uid in alias_cache
1371 ################################################################################
# Module initialisation: load the dak configuration into the global
# 'Cnf' via apt_pkg.
# NOTE(review): elided view -- the import/initialisation of apt_pkg and
# the definition of default_config happen on lines not visible here.
1375 Cnf = apt_pkg.newConfiguration()
# Read the built-in default configuration first ...
1376 apt_pkg.ReadConfigFileISC(Cnf,default_config)
# ... then layer the site-specific configuration on top, if different.
1378 if which_conf_file() != default_config:
1379 apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1381 ################################################################################