2 # vim:set et ts=4 sw=4:
5 # Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
7 ################################################################################
9 # This program is free software; you can redistribute it and/or modify
10 # it under the terms of the GNU General Public License as published by
11 # the Free Software Foundation; either version 2 of the License, or
12 # (at your option) any later version.
14 # This program is distributed in the hope that it will be useful,
15 # but WITHOUT ANY WARRANTY; without even the implied warranty of
16 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 # GNU General Public License for more details.
19 # You should have received a copy of the GNU General Public License
20 # along with this program; if not, write to the Free Software
21 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
23 ################################################################################
25 import codecs, commands, email.Header, os, pwd, re, select, socket, shutil, \
26 sys, tempfile, traceback, stat
29 from dak_exceptions import *
31 ################################################################################
# Precompiled regexes used throughout this module.
re_comments = re.compile(r"\#.*")
re_no_epoch = re.compile(r"^\d+\:")
re_no_revision = re.compile(r"-[^-]+$")
re_arch_from_filename = re.compile(r"/binary-[^/]+/")
re_extract_src_version = re.compile(r"(\S+)\s*\((.*)\)")
re_isadeb = re.compile(r"(.+?)_(.+?)_(.+)\.u?deb$")
re_issource = re.compile(r"(.+)_(.+?)\.(orig\.tar\.gz|diff\.gz|tar\.gz|dsc)$")

re_single_line_field = re.compile(r"^(\S*)\s*:\s*(.*)")
re_multi_line_field = re.compile(r"^\s(.*)")
re_taint_free = re.compile(r"^[-+~/\.\w]+$")

re_parse_maintainer = re.compile(r"^\s*(\S.*\S)\s*\<([^\>]+)\>")
re_gpg_uid = re.compile('^uid.*<([^>]*)>')

# "source (version)" as found in a Source field.
# NOTE: this was defined twice with identical patterns; the duplicate
# definition has been removed.
re_srchasver = re.compile(r"^(\S+)\s+\((\S+)\)$")
re_verwithext = re.compile(r"^(\d+)(?:\.(\d+))(?:\s+\((\S+)\))?$")

default_config = "/etc/dak/dak.conf"
default_apt_config = "/etc/dak/apt.conf"

# Cache of key uid -> email lookups.
key_uid_email_cache = {}

# (hashname, function, earliest_changes_version)
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
                ("sha256", apt_pkg.sha256sum, (1, 8))]
63 ################################################################################
def open_file(filename, mode='r'):
    """Open 'filename' in the given mode and return the file object.
    Raises CantOpenError (instead of letting IOError escape) if the
    file cannot be opened."""
    try:
        f = open(filename, mode)
    except IOError:
        raise CantOpenError(filename)
    return f
72 ################################################################################
def our_raw_input(prompt=""):
    """raw_input() wrapper: prints 'prompt' (if any), reads a line and
    returns it.  EOF (^D) is treated as a user interrupt: a notice is
    written to stderr and SystemExit is raised."""
    if prompt:
        sys.stdout.write(prompt)
    sys.stdout.flush()
    try:
        ret = raw_input()
        return ret
    except EOFError:
        sys.stderr.write("\nUser interrupt (^D).\n")
        raise SystemExit
85 ################################################################################
def extract_component_from_section(section):
    """Split a Section field of the form "component/section" and return
    (section, component).  A bare section maps to the component named
    in Cnf ("Component::<section>") if configured, else to "main"."""
    component = ""

    if section.find('/') != -1:
        component = section.split('/')[0]

    # Expand default component
    if component == "":
        if Cnf.has_key("Component::%s" % section):
            component = section
        else:
            component = "main"

    return (section, component)
102 ################################################################################
def parse_deb822(contents, signing_rules=0):
    """Parse deb822-style control data from the string 'contents' into a
    dict of lowercased field name -> value.  'signing_rules' follows the
    parse_changes() convention (-1 ignore signatures, 0 default,
    1 strict dpkg-source-style checking).  Raises ParseChangesError or
    InvalidDscError on malformed input."""
    error = ""
    changes = {}

    # Split the lines in the input, keeping the linebreaks.
    lines = contents.splitlines(True)

    if len(lines) == 0:
        raise ParseChangesError("[Empty changes file]")

    # Reindex by line number so we can easily verify the format of
    # .dsc files...
    index = 0
    indexed_lines = {}
    for line in lines:
        index += 1
        indexed_lines[index] = line[:-1]

    inside_signature = 0

    num_of_lines = len(indexed_lines.keys())
    index = 0
    first = -1
    while index < num_of_lines:
        index += 1
        line = indexed_lines[index]
        if line == "":
            if signing_rules == 1:
                # Strict mode: a blank line must be immediately followed
                # by the signature block.
                index += 1
                if index > num_of_lines:
                    raise InvalidDscError(index)
                line = indexed_lines[index]
                if not line.startswith("-----BEGIN PGP SIGNATURE"):
                    raise InvalidDscError(index)
                inside_signature = 0
                break
            else:
                continue
        if line.startswith("-----BEGIN PGP SIGNATURE"):
            break
        if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
            inside_signature = 1
            if signing_rules == 1:
                # Skip the PGP header data up to the blank line.
                while index < num_of_lines and line != "":
                    index += 1
                    line = indexed_lines[index]
            continue
        # If we're not inside the signed data, don't process anything
        if signing_rules >= 0 and not inside_signature:
            continue
        slf = re_single_line_field.match(line)
        if slf:
            field = slf.groups()[0].lower()
            changes[field] = slf.groups()[1]
            first = 1
            continue
        if line == " .":
            # deb822 blank-line placeholder inside a multi-line field.
            changes[field] += '\n'
            continue
        mlf = re_multi_line_field.match(line)
        if mlf:
            if first == -1:
                raise ParseChangesError("'%s'\n [Multi-line field continuing on from nothing?]" % (line))
            if first == 1 and changes[field] != "":
                changes[field] += '\n'
            first = 0
            changes[field] += mlf.groups()[0] + '\n'
            continue
        error += line

    if signing_rules == 1 and inside_signature:
        raise InvalidDscError(index)

    changes["filecontents"] = "".join(lines)

    if "source" in changes:
        # Strip the source version in brackets from the source field,
        # put it in the "source-version" field instead.
        srcver = re_srchasver.search(changes["source"])
        if srcver:
            changes["source"] = srcver.group(1)
            changes["source-version"] = srcver.group(2)

    if error:
        raise ParseChangesError(error)

    return changes
192 ################################################################################
def parse_changes(filename, signing_rules=0):
    """Parses a changes file and returns a dictionary where each field is a
key.  The mandatory first argument is the filename of the .changes
file.

signing_rules is an optional argument:

 o If signing_rules == -1, no signature is required.
 o If signing_rules == 0 (the default), a signature is required.
 o If signing_rules == 1, it turns on the same strict format checking
   as dpkg-source.

The rules for (signing_rules == 1)-mode are:

 o The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
   followed by any PGP header data and must end with a blank line.

 o The data section must end with a blank line and must be followed by
   "-----BEGIN PGP SIGNATURE-----".
"""
    changes_in = open_file(filename)
    content = changes_in.read()
    # Close the handle before parsing -- the fragment previously leaked it.
    changes_in.close()
    return parse_deb822(content, signing_rules)
220 ################################################################################
def hash_key(hashname):
    """Return the files-dict key under which the named hash is stored
    (e.g. "md5" -> "md5sum")."""
    return hashname + 'sum'
225 ################################################################################
def create_hash(where, files, hashname, hashfunc):
    """create_hash extends the passed files dict with the given hash by
    iterating over all files on disk and passing them to the hashing
    function given.  Returns a list of rejection messages."""

    rejmsg = []
    for f in files.keys():
        try:
            file_handle = open_file(f)
        except CantOpenError:
            rejmsg.append("Could not open file %s for checksumming" % (f))
            # Skip the file -- without this, the open failure would be
            # followed by a NameError/stale-handle use below.
            continue

        files[f][hash_key(hashname)] = hashfunc(file_handle)
        file_handle.close()
    return rejmsg
244 ################################################################################
def check_hash(where, files, hashname, hashfunc):
    """check_hash checks the given hash in the files dict against the actual
    files on disk.  The hash values need to be present consistently in
    all file entries.  It does not modify its input in any way.
    Returns a list of rejection messages."""

    rejmsg = []
    for f in files.keys():
        file_handle = None
        try:
            try:
                file_handle = open_file(f)

                # Check for the hash entry, to not trigger a KeyError.
                if not files[f].has_key(hash_key(hashname)):
                    rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
                        where))
                    continue

                # Actually check the hash for correctness.
                if hashfunc(file_handle) != files[f][hash_key(hashname)]:
                    rejmsg.append("%s: %s check failed in %s" % (f, hashname,
                        where))
            except CantOpenError:
                # TODO: This happens when the file is in the pool.
                # warn("Cannot open file %s" % f)
                continue
        finally:
            # Always release the handle, even when we reject or continue.
            if file_handle:
                file_handle.close()
    return rejmsg
277 ################################################################################
def check_size(where, files):
    """check_size checks the file sizes in the passed files dict against the
    files on disk.  Returns a list of rejection messages."""

    rejmsg = []
    for f in files.keys():
        try:
            entry = os.stat(f)
        except OSError as exc:
            if exc.errno == 2:
                # TODO: This happens when the file is in the pool.
                continue
            raise

        actual_size = entry[stat.ST_SIZE]
        size = int(files[f]["size"])
        if size != actual_size:
            rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
                          % (f, actual_size, size, where))
    return rejmsg
300 ################################################################################
def check_hash_fields(what, manifest):
    """check_hash_fields ensures that there are no checksum fields in the
    given dict that we do not know about.  Returns a list of rejection
    messages."""

    rejmsg = []
    # List comprehension instead of map(): the result is iterated once
    # per field, so it must be a real sequence.
    hashes = [x[0] for x in known_hashes]
    for field in manifest:
        if field.startswith("checksums-"):
            hashname = field.split("-", 1)[1]
            if hashname not in hashes:
                rejmsg.append("Unsupported checksum field for %s "\
                              "in %s" % (hashname, what))
    return rejmsg
316 ################################################################################
def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
    """Verify 'hashname' for all files in the .changes: parsed from the
    Checksums-* field when the changes format ('format') is new enough
    ('version'), otherwise calculated from disk.  Returns a list of
    rejection messages."""
    if format >= version:
        # The version should contain the specified hash.
        func = check_hash

        # Import hashes from the changes
        rejmsg = parse_checksums(".changes", files, changes, hashname)
        if len(rejmsg) > 0:
            return rejmsg
    else:
        # We need to calculate the hash because it can't possibly
        # be in the file.
        func = create_hash
    return func(".changes", files, hashname, hashfunc)
333 # We could add the orig which might be in the pool to the files dict to
334 # access the checksums easily.
def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
    """ensure_dsc_hashes' task is to ensure that each and every *present* hash
    in the dsc is correct, i.e. identical to the changes file and if necessary
    the pool.  The latter task is delegated to check_hash.
    Returns a list of rejection messages."""

    rejmsg = []
    # An absent Checksums-* field is not an error: only *present* hashes
    # are validated.
    if not dsc.has_key('Checksums-%s' % (hashname,)):
        return rejmsg
    # Import hashes from the dsc
    parse_checksums(".dsc", dsc_files, dsc, hashname)
    # And check it...
    rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
    return rejmsg
350 ################################################################################
def ensure_hashes(changes, dsc, files, dsc_files):
    """Validate every known hash for the .changes (and .dsc, for source
    uploads).  Returns a list of rejection messages."""
    rejmsg = []

    # Make sure we recognise the format of the Files: field in the .changes
    format = changes.get("format", "0.0").split(".", 1)
    if len(format) == 2:
        format = int(format[0]), int(format[1])
    else:
        format = int(float(format[0])), 0

    # We need to deal with the original changes blob, as the fields we need
    # might not be in the changes dict serialised into the .dak anymore.
    orig_changes = parse_deb822(changes['filecontents'])

    # Copy the checksums over to the current changes dict. This will keep
    # the existing modifications to it intact.
    for field in orig_changes:
        if field.startswith('checksums-'):
            changes[field] = orig_changes[field]

    # Check for unsupported hashes
    rejmsg.extend(check_hash_fields(".changes", changes))
    rejmsg.extend(check_hash_fields(".dsc", dsc))

    # We have to calculate the hash if we have an earlier changes version than
    # the hash appears in rather than require it exist in the changes file
    for hashname, hashfunc, version in known_hashes:
        rejmsg.extend(_ensure_changes_hash(changes, format, version, files,
            hashname, hashfunc))
        if "source" in changes["architecture"]:
            rejmsg.extend(_ensure_dsc_hash(dsc, dsc_files, hashname,
                hashfunc))

    return rejmsg
def parse_checksums(where, files, manifest, hashname):
    """Parse the 'checksums-<hashname>' field of 'manifest' into the
    files dict, cross-checking the recorded sizes.  Returns a list of
    rejection messages."""
    rejmsg = []
    field = 'checksums-%s' % hashname
    if not field in manifest:
        return rejmsg
    input = manifest[field]
    for line in input.split('\n'):
        if not line:
            break
        hash, size, file = line.strip().split(' ')
        # TODO: check for the file's entry in the original files dict, not
        # the one modified by (auto)byhand and other weird stuff
        #if not files.has_key(file):
        #    rejmsg.append("%s: not present in files but in checksums-%s in %s" %
        #        (file, hashname, where))
        if not files[file]["size"] == size:
            rejmsg.append("%s: size differs for files and checksums-%s entry "\
                "in %s" % (file, hashname, where))
            continue
        files[file][hash_key(hashname)] = hash
    for f in files.keys():
        if not files[f].has_key(hash_key(hashname)):
            # BUGFIX: this message previously interpolated the stale loop
            # variable 'file' (the last checksum line parsed) instead of
            # the file actually missing its entry.
            rejmsg.append("%s: no entry in checksums-%s in %s" % (f,
                hashname, where))
    return rejmsg
413 ################################################################################
415 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
    """Parse the given checksum field ('files' or a 'checksums-*' field)
    of a changes/dsc dict into a dict of
    filename -> {size, section, priority, component, <hashname>}.
    Raises NoFilesFieldError, UnknownFormatError or ParseChangesError."""
    files = {}

    # Make sure we have a Files: field to parse...
    if not changes.has_key(field):
        raise NoFilesFieldError

    # Make sure we recognise the format of the Files: field
    format = re_verwithext.search(changes.get("format", "0.0"))
    if not format:
        raise UnknownFormatError("%s" % (changes.get("format", "0.0")))

    format = format.groups()
    if format[1] == None:
        format = int(float(format[0])), 0, format[2]
    else:
        format = int(format[0]), int(format[1]), format[2]
    if format[2] == None:
        format = format[:2]

    if is_a_dsc:
        # NOTE(review): reconstructed guard -- confirm whether historic
        # .dscs with a missing/0.0 Format header must also be accepted.
        if format != (1, 0):
            raise UnknownFormatError("%s" % (changes.get("format", "0.0")))
    else:
        if (format < (1, 5) or format > (1, 8)):
            raise UnknownFormatError("%s" % (changes.get("format", "0.0")))
        if field != "files" and format < (1, 8):
            raise UnknownFormatError("%s" % (changes.get("format", "0.0")))

    # Only the .changes "Files:" field carries section and priority.
    includes_section = (not is_a_dsc) and field == "files"

    # Parse each entry/line:
    for i in changes[field].split('\n'):
        if not i:
            break
        s = i.split()
        section = priority = ""
        try:
            if includes_section:
                (md5, size, section, priority, name) = s
            else:
                (md5, size, name) = s
        except ValueError:
            raise ParseChangesError(i)

        if section == "":
            section = "-"
        if priority == "":
            priority = "-"

        (section, component) = extract_component_from_section(section)

        files[name] = Dict(size=size, section=section,
                           priority=priority, component=component)
        files[name][hashname] = md5

    return files
475 ################################################################################
def force_to_utf8(s):
    """Forces a string to UTF-8.  If the string isn't already UTF-8,
    it's assumed to be ISO-8859-1."""
    try:
        # If this decodes, s is already valid UTF-8 -- return it unchanged.
        unicode(s, 'utf-8')
        return s
    except UnicodeError:
        latin1_s = unicode(s, 'iso8859-1')
        return latin1_s.encode('utf-8')
def rfc2047_encode(s):
    """Encodes a (header) string per RFC2047 if necessary.  If the
    string is neither ASCII nor UTF-8, it's assumed to be ISO-8859-1."""
    try:
        # Plain ASCII needs no encoding.
        codecs.lookup('ascii')[1](s)
        return s
    except UnicodeError:
        pass
    try:
        codecs.lookup('utf-8')[1](s)
        h = email.Header.Header(s, 'utf-8', 998)
        return str(h)
    except UnicodeError:
        h = email.Header.Header(s, 'iso-8859-1', 998)
        return str(h)
503 ################################################################################
505 # <Culus> 'The standard sucks, but my tool is supposed to interoperate
506 # with it. I know - I'll fix the suckage and make things
def fix_maintainer (maintainer):
    """Parses a Maintainer or Changed-By field and returns:
    (1) an RFC822 compatible version,
    (2) an RFC2047 compatible version,
    (3) the name,
    (4) the email.

    The name is forced to UTF-8 for both (1) and (3).  If the name field
    contains '.' or ',' (as allowed by Debian policy), (1) and (2) are
    switched to 'email (name)' format."""
    maintainer = maintainer.strip()
    if not maintainer:
        return ('', '', '', '')

    if maintainer.find("<") == -1:
        # Bare address, no name.
        email = maintainer
        name = ""
    elif (maintainer[0] == "<" and maintainer[-1:] == ">"):
        email = maintainer[1:-1]
        name = ""
    else:
        m = re_parse_maintainer.match(maintainer)
        if not m:
            raise ParseMaintError("Doesn't parse as a valid Maintainer field.")
        name = m.group(1)
        email = m.group(2)

    # Get an RFC2047 compliant version of the name
    rfc2047_name = rfc2047_encode(name)

    # Force the name to be UTF-8
    name = force_to_utf8(name)

    if name.find(',') != -1 or name.find('.') != -1:
        rfc822_maint = "%s (%s)" % (email, name)
        rfc2047_maint = "%s (%s)" % (email, rfc2047_name)
    else:
        rfc822_maint = "%s <%s>" % (name, email)
        rfc2047_maint = "%s <%s>" % (rfc2047_name, email)

    # buildd_* pseudo-addresses are exempt from the '@' requirement.
    if email.find("@") == -1 and email.find("buildd_") != 0:
        raise ParseMaintError("No @ found in email address part.")

    return (rfc822_maint, rfc2047_maint, name, email)
554 ################################################################################
556 # sendmail wrapper, takes _either_ a message string or a file as arguments
# sendmail wrapper, takes _either_ a message string or a file as arguments
def send_mail (message, filename=""):
    """Pipe 'message' (or the contents of 'filename') through the
    configured sendmail command.  Raises SendmailFailedError on a
    non-zero exit."""
    # If we've been passed a string dump it into a temporary file
    if message:
        filename = tempfile.mktemp()
        fd = os.open(filename, os.O_RDWR | os.O_CREAT | os.O_EXCL, 0o700)
        os.write(fd, message)
        # Close the descriptor so sendmail sees the flushed file.
        os.close(fd)

    # Invoke sendmail
    (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
    if (result != 0):
        raise SendmailFailedError(output)

    # Clean up any temporary files
    if message:
        os.unlink(filename)
574 ################################################################################
def poolify (source, component):
    """Return the pool sub-path for 'source' within 'component',
    e.g. ("dpkg", "main") -> "main/d/dpkg/".  "lib*" packages hash on
    the first four characters."""
    if component:
        component += '/'
    if source[:3] == "lib":
        return component + source[:4] + '/' + source + '/'
    else:
        return component + source[:1] + '/' + source + '/'
584 ################################################################################
def move (src, dest, overwrite = 0, perms = 0o664):
    """Move 'src' to 'dest' (copy + chmod + unlink), creating the
    destination directory if needed.  Refuses to overwrite an existing
    file unless 'overwrite' is set."""
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = dest
    else:
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        # Create with a clean umask so the group-writable 2775 sticks.
        umask = os.umask(0)
        os.makedirs(dest_dir, 0o2775)
        os.umask(umask)
    #print "Moving %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
        if not overwrite:
            fubar("Can't move %s to %s - file already exists." % (src, dest))
        else:
            if not os.access(dest, os.W_OK):
                fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
    # Remove the source -- this is what makes it a move, not a copy.
    os.unlink(src)
def copy (src, dest, overwrite = 0, perms = 0o664):
    """Copy 'src' to 'dest' (preserving metadata, then chmod'ing to
    'perms'), creating the destination directory if needed.  Raises
    FileExistsError unless 'overwrite' is set, and CantOverwriteError
    if the existing destination is unwritable."""
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = dest
    else:
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        # Create with a clean umask so the group-writable 2775 sticks.
        umask = os.umask(0)
        os.makedirs(dest_dir, 0o2775)
        os.umask(umask)
    #print "Copying %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
        if not overwrite:
            raise FileExistsError
        else:
            if not os.access(dest, os.W_OK):
                raise CantOverwriteError
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
631 ################################################################################
634 res = socket.gethostbyaddr(socket.gethostname())
635 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
636 if database_hostname:
637 return database_hostname
def which_conf_file ():
    """Return the host-specific dak config path if one is configured
    for this host, otherwise the default config path."""
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    host_config = Cnf.get("Config::" + hostname + "::DakConfig")
    if host_config:
        return Cnf["Config::" + hostname + "::DakConfig"]
    return default_config
def which_apt_conf_file ():
    """Return the host-specific apt config path if one is configured
    for this host, otherwise the default apt config path."""
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    host_config = Cnf.get("Config::" + hostname + "::AptConfig")
    if host_config:
        return Cnf["Config::" + hostname + "::AptConfig"]
    return default_apt_config
def which_alias_file():
    """Return this host's forward-alias file path if it exists,
    otherwise None."""
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    aliasfn = '/var/lib/misc/' + hostname + '/forward-alias'
    if os.path.exists(aliasfn):
        return aliasfn
    else:
        return None
663 ################################################################################
665 # Escape characters which have meaning to SQL's regex comparison operator ('~')
666 # (woefully incomplete)
def regex_safe (s):
    """Escape characters which have meaning to SQL's regex comparison
    operator ('~').  (Woefully incomplete.)
    NOTE(review): the def line was missing from the listing; the name
    'regex_safe' is reconstructed -- confirm against callers."""
    s = s.replace('+', '\\\\+')
    s = s.replace('.', '\\\\.')
    return s
673 ################################################################################
675 # Perform a substition of template
# Perform a substition of template
def TemplateSubst(map, filename):
    """Read the template in 'filename' and replace every key of 'map'
    with its value.  Returns the substituted text."""
    file = open_file(filename)
    template = file.read()
    for x in map.keys():
        template = template.replace(x, map[x])
    file.close()
    return template
684 ################################################################################
def fubar(msg, exit_code=1):
    """Print a fatal error to stderr and exit with 'exit_code'."""
    sys.stderr.write("E: %s\n" % (msg))
    sys.exit(exit_code)
691 sys.stderr.write("W: %s\n" % (msg))
693 ################################################################################
695 # Returns the user name with a laughable attempt at rfc822 conformancy
696 # (read: removing stray periods).
698 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
700 ################################################################################
710 return ("%d%s" % (c, t))
712 ################################################################################
def cc_fix_changes (changes):
    """Convert the space-separated "architecture" string of a parsed
    changes dict into a dict of arch -> 1, in place."""
    o = changes.get("architecture", "")
    if o:
        del changes["architecture"]
    changes["architecture"] = {}
    for j in o.split():
        changes["architecture"][j] = 1
722 # Sort by source name, source version, 'have source', and then by filename
# Sort by source name, source version, 'have source', and then by filename
def changes_compare (a, b):
    """cmp()-style comparator for two .changes filenames; unparseable
    files sort first."""
    try:
        a_changes = parse_changes(a)
    except:
        return -1

    try:
        b_changes = parse_changes(b)
    except:
        return 1

    cc_fix_changes (a_changes)
    cc_fix_changes (b_changes)

    # Sort by source name
    a_source = a_changes.get("source")
    b_source = b_changes.get("source")
    q = cmp (a_source, b_source)
    if q:
        return q

    # Sort by source version
    a_version = a_changes.get("version", "0")
    b_version = b_changes.get("version", "0")
    q = apt_pkg.VersionCompare(a_version, b_version)
    if q:
        return q

    # Sort by 'have source'
    a_has_source = a_changes["architecture"].get("source")
    b_has_source = b_changes["architecture"].get("source")
    if a_has_source and not b_has_source:
        return -1
    elif b_has_source and not a_has_source:
        return 1

    # Fall back to sort by filename
    return cmp(a, b)
762 ################################################################################
def find_next_free (dest, too_many=100):
    """Return 'dest' if it doesn't exist yet, otherwise the first free
    'dest.N' variant.  Raises NoFreeFilenameError after 'too_many'
    attempts."""
    extra = 0
    orig_dest = dest
    while os.path.exists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
        extra += 1
    if extra >= too_many:
        raise NoFreeFilenameError
    return dest
774 ################################################################################
def result_join (original, sep = '\t'):
    """Join the items of 'original' with 'sep', mapping None entries
    to the empty string."""
    # Iterate directly (no xrange/len indexing) and avoid shadowing
    # the builtin 'list'.
    items = []
    for item in original:
        if item is None:
            items.append("")
        else:
            items.append(item)
    return sep.join(items)
785 ################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    """Prefix each (stripped) line of 'str' with 'prefix'; blank lines
    are dropped unless 'include_blank_lines' is set.  The result has no
    trailing newline."""
    out = ""
    for line in str.split('\n'):
        line = line.strip()
        if line or include_blank_lines:
            out += "%s%s\n" % (prefix, line)
    # Strip trailing new line
    if out:
        out = out[:-1]
    return out
798 ################################################################################
def validate_changes_file_arg(filename, require_changes=1):
    """'filename' is either a .changes or .dak file.  If 'filename' is a
    .dak file, it's changed to be the corresponding .changes file.  The
    function then checks if the .changes file a) exists and b) is
    readable and returns the .changes filename if so.  If there's a
    problem, the next action depends on the option 'require_changes'
    argument:

     o If 'require_changes' == -1, errors are ignored and the .changes
       filename is returned.
     o If 'require_changes' == 0, a warning is given and 'None' is returned.
     o If 'require_changes' == 1, a fatal error is raised.
    """
    error = None

    orig_filename = filename
    if filename.endswith(".dak"):
        filename = filename[:-4] + ".changes"

    if not filename.endswith(".changes"):
        error = "invalid file type; not a changes file"
    else:
        if not os.access(filename, os.R_OK):
            if os.path.exists(filename):
                error = "permission denied"
            else:
                error = "file not found"

    if error:
        if require_changes == 1:
            fubar("%s: %s." % (orig_filename, error))
        elif require_changes == 0:
            warn("Skipping %s - %s" % (orig_filename, error))
            return None
        else: # We only care about the .dak file
            return filename
    else:
        return filename
839 ################################################################################
842 return (arch != "source" and arch != "all")
844 ################################################################################
def join_with_commas_and(list):
    """Render a list as English prose: [] -> "nothing", ["a"] -> "a",
    ["a","b","c"] -> "a, b and c"."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    return "%s and %s" % (", ".join(list[:-1]), list[-1])
851 ################################################################################
def pp_deps (deps):
    """Pretty-print a dependency list of (pkg, version, constraint)
    tuples as a " |"-joined alternatives string.
    NOTE(review): the def line was missing from the listing; the name
    'pp_deps' is reconstructed -- confirm against callers."""
    pp_deps = []
    for atom in deps:
        (pkg, version, constraint) = atom
        if constraint:
            pp_dep = "%s (%s %s)" % (pkg, constraint, version)
        else:
            pp_dep = pkg
        pp_deps.append(pp_dep)
    return " |".join(pp_deps)
864 ################################################################################
869 ################################################################################
871 # Handle -a, -c and -s arguments; returns them as SQL constraints
# Handle -a, -c and -s arguments; returns them as SQL constraints
def parse_args(Options):
    """Turn the Suite/Component/Architecture options into SQL "AND ..."
    constraint fragments.  Returns (con_suites, con_architectures,
    con_components, check_source)."""
    # Process suite
    con_suites = ""
    if Options["Suite"]:
        suite_ids_list = []
        for suite in split_args(Options["Suite"]):
            suite_id = database.get_suite_id(suite)
            if suite_id == -1:
                warn("suite '%s' not recognised." % (suite))
            else:
                suite_ids_list.append(suite_id)
        if suite_ids_list:
            con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
        else:
            fubar("No valid suite given.")

    # Process component
    con_components = ""
    if Options["Component"]:
        component_ids_list = []
        for component in split_args(Options["Component"]):
            component_id = database.get_component_id(component)
            if component_id == -1:
                warn("component '%s' not recognised." % (component))
            else:
                component_ids_list.append(component_id)
        if component_ids_list:
            con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
        else:
            fubar("No valid component given.")

    # Process architecture
    con_architectures = ""
    check_source = 0
    if Options["Architecture"]:
        arch_ids_list = []
        for architecture in split_args(Options["Architecture"]):
            if architecture == "source":
                # "source" is handled via check_source, not an arch id.
                check_source = 1
            else:
                architecture_id = database.get_architecture_id(architecture)
                if architecture_id == -1:
                    warn("architecture '%s' not recognised." % (architecture))
                else:
                    arch_ids_list.append(architecture_id)
        if arch_ids_list:
            con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
        else:
            if not check_source:
                fubar("No valid architecture given.")
    else:
        check_source = 1

    return (con_suites, con_architectures, con_components, check_source)
929 ################################################################################
931 # Inspired(tm) by Bryn Keller's print_exc_plus (See
932 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
935 tb = sys.exc_info()[2]
944 traceback.print_exc()
946 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
947 frame.f_code.co_filename,
949 for key, value in frame.f_locals.items():
950 print "\t%20s = " % key,
954 print "<unable to print>"
956 ################################################################################
958 def try_with_debug(function):
966 ################################################################################
968 # Function for use in sorting lists of architectures.
969 # Sorts normally except that 'source' dominates all others.
# Function for use in sorting lists of architectures.
# Sorts normally except that 'source' dominates all others.
def arch_compare_sw (a, b):
    """cmp()-style comparator: "source" sorts before everything else."""
    if a == "source" and b == "source":
        return 0
    elif a == "source":
        return -1
    elif b == "source":
        return 1

    return cmp (a, b)
981 ################################################################################
983 # Split command line arguments which can be separated by either commas
984 # or whitespace. If dwim is set, it will complain about string ending
985 # in comma since this usually means someone did 'dak ls -a i386, m68k
986 # foo' or something and the inevitable confusion resulting from 'm68k'
987 # being treated as an argument is undesirable.
# Split command line arguments which can be separated by either commas
# or whitespace.  If dwim is set, it will complain about string ending
# in comma since this usually means someone did 'dak ls -a i386, m68k
# foo' or something and the inevitable confusion resulting from 'm68k'
# being treated as an argument is undesirable.
def split_args (s, dwim=1):
    """Split 's' on whitespace, or on commas if it contains any.
    With dwim set, a trailing comma is a fatal error."""
    if s.find(",") == -1:
        return s.split()
    else:
        if s[-1:] == "," and dwim:
            fubar("split_args: found trailing comma, spurious space maybe?")
        return s.split(",")
997 ################################################################################
999 def Dict(**dict): return dict
1001 ########################################
1003 # Our very own version of commands.getouputstatus(), hacked to support
# Our very own version of commands.getouputstatus(), hacked to support
# gpgv's status fd.
def gpgv_get_status_output(cmd, status_read, status_write):
    """Fork/exec 'cmd' via /bin/sh, keeping 'status_write' open in the
    child so gpgv can write its --status-fd stream to it; the parent
    reads stdout+stderr into 'output' and the status pipe into
    'status'.  Returns (output, status, exit_status).
    NOTE(review): large parts of this plumbing were missing from the
    listing and were reconstructed from the commands.getstatusoutput-
    derived canonical code -- review fd handling carefully."""
    cmd = ['/bin/sh', '-c', cmd]
    p2cread, p2cwrite = os.pipe()
    c2pread, c2pwrite = os.pipe()
    errout, errin = os.pipe()
    pid = os.fork()
    if pid == 0:
        # Child: wire up stdin/stdout/stderr to the pipes.
        os.dup2(p2cread, 0)
        os.dup2(c2pwrite, 1)
        os.dup2(errin, 2)
        # Close every other fd except the status fd gpgv writes to.
        for i in range(3, 256):
            if i != status_write:
                try:
                    os.close(i)
                except:
                    pass
        try:
            os.execvp(cmd[0], cmd)
        finally:
            os._exit(1)

    # Parent
    os.close(p2cread)
    os.dup2(c2pread, c2pwrite)
    os.dup2(errout, errin)

    output = status = ""
    while 1:
        i, o, e = select.select([c2pwrite, errin, status_read], [], [])
        more_data = []
        for fd in i:
            r = os.read(fd, 8196)
            if len(r) > 0:
                more_data.append(fd)
                if fd == c2pwrite or fd == errin:
                    output += r
                elif fd == status_read:
                    status += r
                else:
                    fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
        if not more_data:
            # All pipes drained: reap the child and close everything.
            pid, exit_status = os.waitpid(pid, 0)
            try:
                os.close(status_write)
                os.close(status_read)
                os.close(c2pread)
                os.close(c2pwrite)
                os.close(p2cwrite)
                os.close(errin)
                os.close(errout)
            except:
                pass
            break

    return output, status, exit_status
1065 ################################################################################
def process_gpgv_output(status):
    """Parse gpgv --status-fd output into a dict of
    keyword -> argument-list.  Returns (keywords, internal_error) where
    internal_error is a non-empty string describing any malformed or
    unexpectedly duplicated status lines."""
    # Process the status-fd output
    keywords = {}
    internal_error = ""
    for line in status.split('\n'):
        line = line.replace('\n', '').strip()
        if line == "":
            continue
        split = line.split()
        if len(split) < 2:
            internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
            continue
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
            continue
        args = split[2:]
        # NODATA/SIGEXPIRED/KEYEXPIRED may legitimately repeat.
        if keyword in keywords and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
            continue
        else:
            keywords[keyword] = args
    return (keywords, internal_error)
1092 ################################################################################
def retrieve_key (filename, keyserver=None, keyring=None):
    """Retrieve the key that signed 'filename' from 'keyserver' and
    add it to 'keyring'.  Returns an empty string on success, or an
    error message on failure."""

    # Defaults for keyserver and keyring
    if not keyserver:
        keyserver = Cnf["Dinstall::KeyServer"]
    if not keyring:
        keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(filename):
        return "%s: tainted filename" % (filename)

    # Invoke gpgv on the file
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
    (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)
    if internal_error:
        return internal_error

    # With an empty keyring, the only useful outcome is NO_PUBKEY,
    # which carries the fingerprint we need to fetch.
    if not keywords.has_key("NO_PUBKEY"):
        return "didn't find expected NO_PUBKEY in gpgv status-fd output"

    fingerprint = keywords["NO_PUBKEY"][0]
    # XXX - gpg sucks.  You can't use --secret-keyring=/dev/null as
    # it'll try to create a lockfile in /dev.  A better solution might
    # be a tempfile or something.
    cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
          % (Cnf["Dinstall::SigningKeyring"])
    cmd += " --keyring %s --keyserver %s --recv-key %s" \
           % (keyring, keyserver, fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    if (result != 0):
        return "'%s' failed with exit code %s" % (cmd, result)

    return ""
1136 ################################################################################
def gpg_keyring_args(keyrings=None):
    """Return a "--keyring X --keyring Y ..." argument string for the
    given keyrings (defaults to Dinstall::GPGKeyring from the config)."""
    if not keyrings:
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
    return " ".join(["--keyring %s" % x for x in keyrings])
1144 ################################################################################
1146 def check_signature (sig_filename, reject, data_filename="", keyrings=None, autofetch=None):
1147 """Check the signature of a file and return the fingerprint if the
1148 signature is valid or 'None' if it's not. The first argument is the
1149 filename whose signature should be checked. The second argument is a
1150 reject function and is called when an error is found. The reject()
1151 function must allow for two arguments: the first is the error message,
1152 the second is an optional prefix string. It's possible for reject()
1153 to be called more than once during an invocation of check_signature().
1154 The third argument is optional and is the name of the files the
1155 detached signature applies to. The fourth argument is optional and is
1156 a *list* of keyrings to use. 'autofetch' can either be None, True or
1157 False. If None, the default behaviour specified in the config will be
1160 # Ensure the filename contains no shell meta-characters or other badness
1161 if not re_taint_free.match(sig_filename):
1162 reject("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
1165 if data_filename and not re_taint_free.match(data_filename):
1166 reject("!!WARNING!! tainted data filename: '%s'." % (data_filename))
1170 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1172 # Autofetch the signing key if that's enabled
1173 if autofetch == None:
1174 autofetch = Cnf.get("Dinstall::KeyAutoFetch")
1176 error_msg = retrieve_key(sig_filename)
1181 # Build the command line
1182 status_read, status_write = os.pipe();
1183 cmd = "gpgv --status-fd %s %s %s %s" % (
1184 status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)
1186 # Invoke gpgv on the file
1187 (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)
1189 # Process the status-fd output
1190 (keywords, internal_error) = process_gpgv_output(status)
1192 # If we failed to parse the status-fd output, let's just whine and bail now
1194 reject("internal error while performing signature check on %s." % (sig_filename))
1195 reject(internal_error, "")
1196 reject("Please report the above errors to the Archive maintainers by replying to this mail.", "")
1200 # Now check for obviously bad things in the processed output
1201 if keywords.has_key("KEYREVOKED"):
1202 reject("The key used to sign %s has been revoked." % (sig_filename))
1204 if keywords.has_key("BADSIG"):
1205 reject("bad signature on %s." % (sig_filename))
1207 if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
1208 reject("failed to check signature on %s." % (sig_filename))
1210 if keywords.has_key("NO_PUBKEY"):
1211 args = keywords["NO_PUBKEY"]
1214 reject("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
1216 if keywords.has_key("BADARMOR"):
1217 reject("ASCII armour of signature was corrupt in %s." % (sig_filename))
1219 if keywords.has_key("NODATA"):
1220 reject("no signature found in %s." % (sig_filename))
1222 if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
1223 args = keywords["KEYEXPIRED"]
1226 reject("The key (0x%s) used to sign %s has expired." % (key, sig_filename))
1232 # Next check gpgv exited with a zero return code
1234 reject("gpgv failed while checking %s." % (sig_filename))
1236 reject(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
1238 reject(prefix_multi_line_string(output, " [GPG output:] "), "")
1241 # Sanity check the good stuff we expect
1242 if not keywords.has_key("VALIDSIG"):
1243 reject("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
1246 args = keywords["VALIDSIG"]
1248 reject("internal error while checking signature on %s." % (sig_filename))
1251 fingerprint = args[0]
1252 if not keywords.has_key("GOODSIG"):
1253 reject("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
1255 if not keywords.has_key("SIG_ID"):
1256 reject("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))
1259 # Finally ensure there's not something we don't recognise
1260 known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
1261 SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
1262 NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")
1264 for keyword in keywords.keys():
1265 if not known_keywords.has_key(keyword):
1266 reject("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
1274 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """Retrieve email addresses from gpg key uids for a given fingerprint."""
    # Consult the module-level per-fingerprint cache first so gpg is
    # only invoked once per key.
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses != None:
    # Cache miss: ask gpg for the key's details; the 'uid' output lines
    # carry the e-mail addresses we are after.
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
          % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    for l in output.split('\n'):
        # re_gpg_uid captures the text between '<' and '>' on 'uid' lines.
        m = re_gpg_uid.match(l)
        addresses.add(m.group(1))
    # Remember the result for subsequent calls with the same fingerprint.
    key_uid_email_cache[fingerprint] = addresses
1293 ################################################################################
1295 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """Greedily word-wrap 'paragraph' so lines stay within 'max_length'
    characters, prepending 'prefix' to each continuation line."""
    # Split on any whitespace; wrapping is decided word by word.
    words = paragraph.split()
        word_size = len(word)
        # A single word longer than the limit is flushed onto its own line
        # rather than being broken mid-word.
        if word_size > max_length:
            s += line + '\n' + prefix
            s += word + '\n' + prefix
        # +1 accounts for the separating space before the candidate word.
        new_length = len(line) + word_size + 1
        # Appending the word would overflow: emit the current line first.
        if new_length > max_length:
            s += line + '\n' + prefix
1326 ################################################################################
1328 # Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
1329 # Returns fixed 'src'
def clean_symlink (src, dest, root):
    """Relativize an absolute symlink from 'src' -> 'dest' relative to
    'root'.  Returns the fixed 'src' path."""
    # Strip the archive root off both endpoints (first occurrence only).
    stripped_src = src.replace(root, '', 1)
    stripped_dest = dest.replace(root, '', 1)
    # The link lives in dest's directory; climb one level per path
    # component of that directory to get back to the root.
    link_dir = os.path.dirname(stripped_dest)
    depth = len(link_dir.split('/'))
    return ('../' * depth) + stripped_src
1337 ################################################################################
def temp_filename(directory=None, dotprefix=None, perms=0700):
    """Return a secure and unique filename by pre-creating it.
    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'dotprefix' is non-null, the filename will be prefixed with a '.'."""
    # Temporarily redirect tempfile's working directory; restored below.
    old_tempdir = tempfile.tempdir
    tempfile.tempdir = directory
    # mktemp() alone is racy; the O_EXCL create below closes the race by
    # failing if someone else grabbed the name first.
    filename = tempfile.mktemp()
        # Hide the file by dotting its basename.
        filename = "%s/.%s" % (os.path.dirname(filename), os.path.basename(filename))
    # Pre-create the file exclusively with the requested permissions.
    fd = os.open(filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, perms)
    # Restore the module-global tempdir for other callers.
    tempfile.tempdir = old_tempdir
1360 ################################################################################
1362 # checks if the user part of the email is listed in the alias file
def is_email_alias(email):
    """Return whether the local part of 'email' (before '@') is listed
    in the system alias file."""
    # Lazily populate the module-level cache on first use.
    if alias_cache == None:
        aliasfn = which_alias_file()
        # Alias file entries look like "name: target"; the alias name is
        # everything before the first ':'.
        for l in open(aliasfn):
            alias_cache.add(l.split(':')[0])
    uid = email.split('@')[0]
    return uid in alias_cache
1375 ################################################################################
# Module-level initialisation: load the default apt-pkg style (ISC)
# configuration, then overlay the site-specific config file when it
# differs from the default.
Cnf = apt_pkg.newConfiguration()
apt_pkg.ReadConfigFileISC(Cnf,default_config)

if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1385 ################################################################################