2 # vim:set et ts=4 sw=4:
5 # Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
7 ################################################################################
9 # This program is free software; you can redistribute it and/or modify
10 # it under the terms of the GNU General Public License as published by
11 # the Free Software Foundation; either version 2 of the License, or
12 # (at your option) any later version.
14 # This program is distributed in the hope that it will be useful,
15 # but WITHOUT ANY WARRANTY; without even the implied warranty of
16 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 # GNU General Public License for more details.
19 # You should have received a copy of the GNU General Public License
20 # along with this program; if not, write to the Free Software
21 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
23 ################################################################################
25 import codecs, commands, email.Header, os, pwd, re, select, socket, shutil, \
26 sys, tempfile, traceback, stat
29 from dak_exceptions import *
31 ################################################################################
# Module-level compiled regexes and constants used throughout this file.
# NOTE(review): this copy is truncated (embedded original line numbers skip) --
# verify against upstream dak utils.py before editing.
# NOTE(review): re_srchasver is defined twice (identically); the second
# assignment is redundant and the duplicate should be removed upstream.
33 re_comments = re.compile(r"\#.*")
34 re_no_epoch = re.compile(r"^\d+\:")
35 re_no_revision = re.compile(r"-[^-]+$")
36 re_arch_from_filename = re.compile(r"/binary-[^/]+/")
37 re_extract_src_version = re.compile (r"(\S+)\s*\((.*)\)")
38 re_isadeb = re.compile (r"(.+?)_(.+?)_(.+)\.u?deb$")
39 re_issource = re.compile (r"(.+)_(.+?)\.(orig\.tar\.gz|diff\.gz|tar\.gz|dsc)$")
41 re_single_line_field = re.compile(r"^(\S*)\s*:\s*(.*)")
42 re_multi_line_field = re.compile(r"^\s(.*)")
43 re_taint_free = re.compile(r"^[-+~/\.\w]+$")
45 re_parse_maintainer = re.compile(r"^\s*(\S.*\S)\s*\<([^\>]+)\>")
46 re_gpg_uid = re.compile('^uid.*<([^>]*)>')
48 re_srchasver = re.compile(r"^(\S+)\s+\((\S+)\)$")
49 re_verwithext = re.compile(r"^(\d+)(?:\.(\d+))(?:\s+\((\S+)\))?$")
51 re_srchasver = re.compile(r"^(\S+)\s+\((\S+)\)$")
53 default_config = "/etc/dak/dak.conf"
54 default_apt_config = "/etc/dak/apt.conf"
# Cache of key fingerprint -> uid/email lookups (filled elsewhere in the file).
57 key_uid_email_cache = {}
# Supported checksums as (hashname, hashing function, earliest .changes
# format version that carries the field).
59 # (hashname, function, earliest_changes_version)
60 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
61 ("sha256", apt_pkg.sha256sum, (1, 8))]
63 ################################################################################
# Open 'filename' with the given mode, wrapping any IOError in CantOpenError.
# NOTE(review): copy truncated (embedded line numbers skip: try/except wrapper
# and the return statement are missing) -- restore from upstream dak utils.py.
65 def open_file(filename, mode='r'):
67     f = open(filename, mode)
69     raise CantOpenError, filename
72 ################################################################################
# Prompted raw_input wrapper; presumably exits cleanly on EOF (^D) -- the
# surviving lines only show the prompt write and the EOF message.
# NOTE(review): copy truncated (embedded line numbers skip) -- verify upstream.
74 def our_raw_input(prompt=""):
76 sys.stdout.write(prompt)
82 sys.stderr.write("\nUser interrupt (^D).\n")
85 ################################################################################
# Split a Debian "section" value ("contrib/net") into (section, component);
# a Cnf lookup expands default components for bare sections.
# NOTE(review): copy truncated (embedded line numbers skip) -- verify upstream.
87 def extract_component_from_section(section):
90 if section.find('/') != -1:
91 component = section.split('/')[0]
93 # Expand default component
95 if Cnf.has_key("Component::%s" % section):
100 return (section, component)
102 ################################################################################
# Parse deb822/RFC822-style 'contents' into a dict of lower-cased field ->
# value, honouring PGP "BEGIN PGP SIGNED MESSAGE"/"BEGIN PGP SIGNATURE"
# markers according to 'signing_rules' (see parse_changes for the modes).
# The raw text is preserved under the "filecontents" key, and a source
# field of the form "name (version)" is split into source/source-version.
# NOTE(review): this copy is missing many lines (the embedded original line
# numbers skip); the control flow shown is incomplete -- restore from
# upstream dak utils.py before trusting or editing this function.
104 def parse_deb822(contents, signing_rules=0):
108 # Split the lines in the input, keeping the linebreaks.
109 lines = contents.splitlines(True)
112 raise ParseChangesError, "[Empty changes file]"
114 # Reindex by line number so we can easily verify the format of
120 indexed_lines[index] = line[:-1]
124 num_of_lines = len(indexed_lines.keys())
127 while index < num_of_lines:
129 line = indexed_lines[index]
131 if signing_rules == 1:
133 if index > num_of_lines:
134 raise InvalidDscError, index
135 line = indexed_lines[index]
136 if not line.startswith("-----BEGIN PGP SIGNATURE"):
137 raise InvalidDscError, index
142 if line.startswith("-----BEGIN PGP SIGNATURE"):
144 if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
146 if signing_rules == 1:
147 while index < num_of_lines and line != "":
149 line = indexed_lines[index]
151 # If we're not inside the signed data, don't process anything
152 if signing_rules >= 0 and not inside_signature:
154 slf = re_single_line_field.match(line)
156 field = slf.groups()[0].lower()
157 changes[field] = slf.groups()[1]
161 changes[field] += '\n'
163 mlf = re_multi_line_field.match(line)
166 raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
167 if first == 1 and changes[field] != "":
168 changes[field] += '\n'
170 changes[field] += mlf.groups()[0] + '\n'
174 if signing_rules == 1 and inside_signature:
175 raise InvalidDscError, index
177 changes["filecontents"] = "".join(lines)
179 if changes.has_key("source"):
180 # Strip the source version in brackets from the source field,
181 # put it in the "source-version" field instead.
182 srcver = re_srchasver.search(changes["source"])
184 changes["source"] = srcver.group(1)
185 changes["source-version"] = srcver.group(2)
188 raise ParseChangesError, error
192 ################################################################################
# Thin wrapper: read 'filename' and hand the content to parse_deb822 with
# the given signing_rules.
# NOTE(review): copy truncated (embedded line numbers skip; the docstring's
# closing quotes are among the missing lines) -- verify against upstream.
194 def parse_changes(filename, signing_rules=0):
195 """Parses a changes file and returns a dictionary where each field is a
196 key. The mandatory first argument is the filename of the .changes
199 signing_rules is an optional argument:
201 o If signing_rules == -1, no signature is required.
202 o If signing_rules == 0 (the default), a signature is required.
203 o If signing_rules == 1, it turns on the same strict format checking
206 The rules for (signing_rules == 1)-mode are:
208 o The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
209 followed by any PGP header data and must end with a blank line.
211 o The data section must end with a blank line and must be followed by
212 "-----BEGIN PGP SIGNATURE-----".
215 changes_in = open_file(filename)
216 content = changes_in.read()
218 return parse_deb822(content, signing_rules)
220 ################################################################################
def hash_key(hashname):
    """Return the files-dict key used to store the given hash.

    E.g. hash_key("md5") -> "md5sum".
    """
    # Defect fixed: stray line-number residue ("222"/"223") had been baked
    # into the source, making the line unparseable.
    return '%ssum' % hashname
225 ################################################################################
# Extend each entry of the 'files' dict with hashfunc's digest of the file
# on disk, collecting reject messages for unopenable files.
# NOTE(review): copy truncated (embedded line numbers skip: rejmsg init,
# try, and return are missing) -- verify against upstream dak utils.py.
227 def create_hash(where, files, hashname, hashfunc):
228 """create_hash extends the passed files dict with the given hash by
229 iterating over all files on disk and passing them to the hashing
233 for f in files.keys():
235 file_handle = open_file(f)
236 except CantOpenError:
237 rejmsg.append("Could not open file %s for checksumming" % (f))
239 files[f][hash_key(hashname)] = hashfunc(file_handle)
244 ################################################################################
# Verify the stored hash of each file entry against the file on disk,
# accumulating human-readable reject messages.
# NOTE(review): copy truncated (embedded line numbers skip: rejmsg init,
# try, string continuations and return are missing) -- verify upstream.
246 def check_hash(where, files, hashname, hashfunc):
247 """check_hash checks the given hash in the files dict against the actual
248 files on disk. The hash values need to be present consistently in
249 all file entries. It does not modify its input in any way."""
252 for f in files.keys():
254 file_handle = open_file(f)
256 # Check for the hash entry, to not trigger a KeyError.
257 if not files[f].has_key(hash_key(hashname)):
258 rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
262 # Actually check the hash for correctness.
263 if hashfunc(file_handle) != files[f][hash_key(hashname)]:
264 rejmsg.append("%s: %s check failed in %s" % (f, hashname,
266 except CantOpenError:
267 # XXX: IS THIS THE BLOODY CASE WHEN THE FILE'S IN THE POOL!?
273 ################################################################################
# Compare each file's on-disk size against the "size" recorded in the
# files dict; mismatches become reject messages.
# NOTE(review): copy truncated (embedded line numbers skip: rejmsg init
# and return are missing) -- verify against upstream dak utils.py.
275 def check_size(where, files):
276 """check_size checks the file sizes in the passed files dict against the
280 for f in files.keys():
281 actual_size = os.stat(f)[stat.ST_SIZE]
282 size = int(files[f]["size"])
283 if size != actual_size:
284 rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
285 % (f, actual_size, size, where))
288 ################################################################################
# Reject any "checksums-*" field in 'manifest' whose hash name is not in
# the module-level known_hashes table.
# NOTE(review): copy truncated (rejmsg init and return missing) -- verify
# against upstream dak utils.py.
290 def check_hash_fields(what, manifest):
291 """check_hash_fields ensures that there are no checksum fields in the
292 given dict that we do not know about."""
295 hashes = map(lambda x: x[0], known_hashes)
296 for field in manifest:
297 if field.startswith("checksums-"):
298 hashname = field.split("-",1)[1]
299 if hashname not in hashes:
300 rejmsg.append("Unsupported checksum field for %s "\
301 "in %s" % (hashname, what))
304 ################################################################################
# Ensure a given hash is available for all .changes files: import it from
# the changes when the format version carries it, otherwise compute it.
# NOTE(review): copy truncated (embedded line numbers skip; the 'func'
# selection lines are missing) -- verify against upstream dak utils.py.
306 def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
307 if format >= version:
308 # The version should contain the specified hash.
311 # Import hashes from the changes
312 rejmsg = parse_checksums(".changes", files, changes, hashname)
316 # We need to calculate the hash because it can't possibly
319 return func(".changes", files, hashname, hashfunc)
321 # We could add the orig which might be in the pool to the files dict to
322 # access the checksums easily.
# Verify every hash present in the .dsc against the files; actual
# verification is delegated to check_hash.
# NOTE(review): copy truncated (rejmsg init, early return and final return
# are missing) -- verify against upstream dak utils.py.
324 def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
325 """ensure_dsc_hashes' task is to ensure that each and every *present* hash
326 in the dsc is correct, i.e. identical to the changes file and if necessary
327 the pool. The latter task is delegated to check_hash."""
330 if not dsc.has_key('Checksums-%s' % (hashname,)):
332 # Import hashes from the dsc
333 parse_checksums(".dsc", dsc_files, dsc, hashname)
335 rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
338 ################################################################################
# Top-level driver: parse the .changes format version, re-import checksum
# fields from the raw changes blob, reject unknown hash fields, then ensure
# every known hash for both the .changes files and (for source uploads)
# the .dsc files.
# NOTE(review): copy truncated (embedded line numbers skip: rejmsg init,
# try/except around the format parse, call continuations and return are
# missing) -- verify against upstream dak utils.py.
340 def ensure_hashes(changes, dsc, files, dsc_files):
343 # Make sure we recognise the format of the Files: field in the .changes
344 format = changes.get("format", "0.0").split(".", 1)
346 format = int(format[0]), int(format[1])
348 format = int(float(format[0])), 0
350 # We need to deal with the original changes blob, as the fields we need
351 # might not be in the changes dict serialised into the .dak anymore.
352 orig_changes = parse_deb822(changes['filecontents'])
354 # Copy the checksums over to the current changes dict. This will keep
355 # the existing modifications to it intact.
356 for field in orig_changes:
357 if field.startswith('checksums-'):
358 changes[field] = orig_changes[field]
360 # Check for unsupported hashes
361 rejmsg.extend(check_hash_fields(".changes", changes))
362 rejmsg.extend(check_hash_fields(".dsc", dsc))
364 # We have to calculate the hash if we have an earlier changes version than
365 # the hash appears in rather than require it exist in the changes file
366 for hashname, hashfunc, version in known_hashes:
367 rejmsg.extend(_ensure_changes_hash(changes, format, version, files,
369 if "source" in changes["architecture"]:
370 rejmsg.extend(_ensure_dsc_hash(dsc, dsc_files, hashname,
# Parse a "Checksums-<hash>" manifest field ("hash size file" per line)
# into the files dict, cross-checking sizes and that every file has an
# entry; returns reject messages.
# NOTE(review): copy truncated (rejmsg init, early return, blank-line skip
# and final return are missing). Also note the last rejmsg uses 'file'
# (loop leftover) where 'f' looks intended -- confirm against upstream.
375 def parse_checksums(where, files, manifest, hashname):
377 field = 'checksums-%s' % hashname
378 if not field in manifest:
380 input = manifest[field]
381 for line in input.split('\n'):
384 hash, size, file = line.strip().split(' ')
385 if not files.has_key(file):
386 rejmsg.append("%s: not present in files but in checksums-%s in %s" %
387 (file, hashname, where))
388 if not files[file]["size"] == size:
389 rejmsg.append("%s: size differs for files and checksums-%s entry "\
390 "in %s" % (file, hashname, where))
391 files[file][hash_key(hashname)] = hash
392 for f in files.keys():
393 if not files[f].has_key(hash_key(hashname)):
394 rejmsg.append("%s: no entry in checksums-%s in %s" % (file,
398 ################################################################################
400 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
# Build a files dict from a .changes/.dsc "Files:"-style field: validates
# the Format version (1.5..1.8 accepted), then parses each "md5 size
# [section priority] name" line into Dict entries keyed by filename.
# NOTE(review): copy truncated (embedded line numbers skip: files init,
# several if/else branches, len(s) dispatch and return are missing) --
# verify against upstream dak utils.py.
402 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
405 # Make sure we have a Files: field to parse...
406 if not changes.has_key(field):
407 raise NoFilesFieldError
409 # Make sure we recognise the format of the Files: field
410 format = re_verwithext.search(changes.get("format", "0.0"))
412 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
414 format = format.groups()
415 if format[1] == None:
416 format = int(float(format[0])), 0, format[2]
418 format = int(format[0]), int(format[1]), format[2]
419 if format[2] == None:
424 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
426 if (format < (1,5) or format > (1,8)):
427 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
428 if field != "files" and format < (1,8):
429 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
431 includes_section = (not is_a_dsc) and field == "files"
433 # Parse each entry/line:
434 for i in changes[field].split('\n'):
438 section = priority = ""
441 (md5, size, section, priority, name) = s
443 (md5, size, name) = s
445 raise ParseChangesError, i
452 (section, component) = extract_component_from_section(section)
454 files[name] = Dict(size=size, section=section,
455 priority=priority, component=component)
456 files[name][hashname] = md5
460 ################################################################################
# Python 2 helper: return 's' re-encoded as UTF-8, falling back to
# ISO-8859-1 decoding; the try-UTF-8-first branch is among the missing lines.
# NOTE(review): copy truncated (embedded line numbers skip) -- verify upstream.
462 def force_to_utf8(s):
463 """Forces a string to UTF-8. If the string isn't already UTF-8,
464 it's assumed to be ISO-8859-1."""
469 latin1_s = unicode(s,'iso8859-1')
470 return latin1_s.encode('utf-8')
# RFC 2047-encode a header string: pass plain ASCII through, otherwise try
# UTF-8, else assume ISO-8859-1. codecs.lookup(...)[1] is the Python 2
# decoder used purely as a validity probe (it raises on bad input).
# NOTE(review): copy truncated (try/except scaffolding and returns are
# missing) -- verify against upstream dak utils.py.
472 def rfc2047_encode(s):
473 """Encodes a (header) string per RFC2047 if necessary. If the
474 string is neither ASCII nor UTF-8, it's assumed to be ISO-8859-1."""
476 codecs.lookup('ascii')[1](s)
481 codecs.lookup('utf-8')[1](s)
482 h = email.Header.Header(s, 'utf-8', 998)
485 h = email.Header.Header(s, 'iso-8859-1', 998)
488 ################################################################################
490 # <Culus> 'The standard sucks, but my tool is supposed to interoperate
491 # with it. I know - I'll fix the suckage and make things
# Parse a Maintainer/Changed-By field into (rfc822, rfc2047, name, email).
# Names containing '.' or ',' are emitted as "email (name)" to keep the
# RFC822 form parseable, per Debian policy allowances.
# NOTE(review): copy truncated (embedded line numbers skip: several
# branches assigning name/email are missing) -- verify against upstream.
494 def fix_maintainer (maintainer):
495 """Parses a Maintainer or Changed-By field and returns:
496 (1) an RFC822 compatible version,
497 (2) an RFC2047 compatible version,
501 The name is forced to UTF-8 for both (1) and (3). If the name field
502 contains '.' or ',' (as allowed by Debian policy), (1) and (2) are
503 switched to 'email (name)' format."""
504 maintainer = maintainer.strip()
506 return ('', '', '', '')
508 if maintainer.find("<") == -1:
511 elif (maintainer[0] == "<" and maintainer[-1:] == ">"):
512 email = maintainer[1:-1]
515 m = re_parse_maintainer.match(maintainer)
517 raise ParseMaintError, "Doesn't parse as a valid Maintainer field."
521 # Get an RFC2047 compliant version of the name
522 rfc2047_name = rfc2047_encode(name)
524 # Force the name to be UTF-8
525 name = force_to_utf8(name)
527 if name.find(',') != -1 or name.find('.') != -1:
528 rfc822_maint = "%s (%s)" % (email, name)
529 rfc2047_maint = "%s (%s)" % (email, rfc2047_name)
531 rfc822_maint = "%s <%s>" % (name, email)
532 rfc2047_maint = "%s <%s>" % (rfc2047_name, email)
534 if email.find("@") == -1 and email.find("buildd_") != 0:
535 raise ParseMaintError, "No @ found in email address part."
537 return (rfc822_maint, rfc2047_maint, name, email)
539 ################################################################################
541 # sendmail wrapper, takes _either_ a message string or a file as arguments
# Pipe the message (string dumped to a temp file, or an existing file)
# through the configured sendmail command; raises SendmailFailedError on a
# non-zero exit.
# NOTE(review): copy truncated (os.close, the if/else around string vs
# file input, and the temp-file cleanup are missing). tempfile.mktemp is
# race-prone; upstream mitigates with O_CREAT|O_EXCL -- confirm.
542 def send_mail (message, filename=""):
543 # If we've been passed a string dump it into a temporary file
545 filename = tempfile.mktemp()
546 fd = os.open(filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0700)
547 os.write (fd, message)
551 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
553 raise SendmailFailedError, output
555 # Clean up any temporary files
559 ################################################################################
# Compute the pool directory for a source package: "lib" packages are
# bucketed by their first four characters, others by the first character.
# NOTE(review): copy truncated (embedded line numbers skip; the lines that
# normalise 'component' are missing) -- verify against upstream.
561 def poolify (source, component):
564 if source[:3] == "lib":
565 return component + source[:4] + '/' + source + '/'
567 return component + source[:1] + '/' + source + '/'
569 ################################################################################
# Move 'src' to 'dest' (copy2 + chmod; the os.unlink(src) completing the
# move is among the missing lines), creating the destination directory
# (mode 02775) if needed; refuses to overwrite unless 'overwrite' is set.
# NOTE(review): copy truncated (embedded line numbers skip: umask restore,
# overwrite guard and unlink are missing) -- verify against upstream.
571 def move (src, dest, overwrite = 0, perms = 0664):
572 if os.path.exists(dest) and os.path.isdir(dest):
575 dest_dir = os.path.dirname(dest)
576 if not os.path.exists(dest_dir):
577 umask = os.umask(00000)
578 os.makedirs(dest_dir, 02775)
580 #print "Moving %s to %s..." % (src, dest)
581 if os.path.exists(dest) and os.path.isdir(dest):
582 dest += '/' + os.path.basename(src)
583 # Don't overwrite unless forced to
584 if os.path.exists(dest):
586 fubar("Can't move %s to %s - file already exists." % (src, dest))
588 if not os.access(dest, os.W_OK):
589 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
590 shutil.copy2(src, dest)
591 os.chmod(dest, perms)
# Copy 'src' to 'dest' (copy2 + chmod), creating the destination directory
# (mode 02775) if needed; raises FileExistsError / CantOverwriteError
# instead of fubar()-ing like move() does.
# NOTE(review): copy truncated (embedded line numbers skip: umask restore
# and overwrite guard are missing) -- verify against upstream.
594 def copy (src, dest, overwrite = 0, perms = 0664):
595 if os.path.exists(dest) and os.path.isdir(dest):
598 dest_dir = os.path.dirname(dest)
599 if not os.path.exists(dest_dir):
600 umask = os.umask(00000)
601 os.makedirs(dest_dir, 02775)
603 #print "Copying %s to %s..." % (src, dest)
604 if os.path.exists(dest) and os.path.isdir(dest):
605 dest += '/' + os.path.basename(src)
606 # Don't overwrite unless forced to
607 if os.path.exists(dest):
609 raise FileExistsError
611 if not os.access(dest, os.W_OK):
612 raise CantOverwriteError
613 shutil.copy2(src, dest)
614 os.chmod(dest, perms)
616 ################################################################################
619 res = socket.gethostbyaddr(socket.gethostname())
620 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
621 if database_hostname:
622 return database_hostname
# Return the per-host dak config path from Cnf if configured, otherwise
# the module-level default_config.
# NOTE(review): copy truncated (an else line is missing) -- verify upstream.
626 def which_conf_file ():
627 res = socket.gethostbyaddr(socket.gethostname())
628 if Cnf.get("Config::" + res[0] + "::DakConfig"):
629 return Cnf["Config::" + res[0] + "::DakConfig"]
631 return default_config
# Return the per-host apt config path from Cnf if configured, otherwise
# the module-level default_apt_config.
# NOTE(review): copy truncated (an else line is missing) -- verify upstream.
633 def which_apt_conf_file ():
634 res = socket.gethostbyaddr(socket.gethostname())
635 if Cnf.get("Config::" + res[0] + "::AptConfig"):
636 return Cnf["Config::" + res[0] + "::AptConfig"]
638 return default_apt_config
# Locate this host's forward-alias file under /var/lib/misc/<hostname>/.
# NOTE(review): copy truncated (the return statements after the existence
# check are missing) -- verify against upstream dak utils.py.
640 def which_alias_file():
641 hostname = socket.gethostbyaddr(socket.gethostname())[0]
642 aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
643 if os.path.exists(aliasfn):
648 ################################################################################
650 # Escape characters which have meaning to SQL's regex comparison operator ('~')
651 # (woefully incomplete)
654 s = s.replace('+', '\\\\+')
655 s = s.replace('.', '\\\\.')
658 ################################################################################
660 # Perform a substition of template
# Read the template file and replace each key of 'map' with its value.
# NOTE(review): copy truncated (the for loop over map's keys, file close
# and return are missing) -- verify against upstream dak utils.py.
661 def TemplateSubst(map, filename):
662 file = open_file(filename)
663 template = file.read()
665 template = template.replace(x,map[x])
669 ################################################################################
# Print a fatal error to stderr; presumably followed by sys.exit(exit_code)
# on the missing next line -- confirm against upstream dak utils.py.
671 def fubar(msg, exit_code=1):
672 sys.stderr.write("E: %s\n" % (msg))
676 sys.stderr.write("W: %s\n" % (msg))
678 ################################################################################
680 # Returns the user name with a laughable attempt at rfc822 conformancy
681 # (read: removing stray periods).
683 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
685 ################################################################################
695 return ("%d%s" % (c, t))
697 ################################################################################
# Normalise a changes dict's "architecture" field from a string into a
# dict of {arch: 1} entries (used by changes_compare below).
# NOTE(review): copy truncated (the split/for loop line is missing) --
# verify against upstream dak utils.py.
699 def cc_fix_changes (changes):
700 o = changes.get("architecture", "")
702 del changes["architecture"]
703 changes["architecture"] = {}
705 changes["architecture"][j] = 1
707 # Sort by source name, source version, 'have source', and then by filename
# Python 2 cmp-style comparator for .changes filenames; parse failures and
# the fall-through returns are among the missing lines.
# NOTE(review): copy truncated (try/except around parse_changes, the
# 'if q:' early returns and the final filename cmp are missing) -- verify
# against upstream dak utils.py.
708 def changes_compare (a, b):
710 a_changes = parse_changes(a)
715 b_changes = parse_changes(b)
719 cc_fix_changes (a_changes)
720 cc_fix_changes (b_changes)
722 # Sort by source name
723 a_source = a_changes.get("source")
724 b_source = b_changes.get("source")
725 q = cmp (a_source, b_source)
729 # Sort by source version
730 a_version = a_changes.get("version", "0")
731 b_version = b_changes.get("version", "0")
732 q = apt_pkg.VersionCompare(a_version, b_version)
736 # Sort by 'have source'
737 a_has_source = a_changes["architecture"].get("source")
738 b_has_source = b_changes["architecture"].get("source")
739 if a_has_source and not b_has_source:
741 elif b_has_source and not a_has_source:
744 # Fall back to sort by filename
747 ################################################################################
# Find a non-existing filename by appending ".1", ".2", ... to 'dest',
# giving up with NoFreeFilenameError after 'too_many' attempts.
# NOTE(review): copy truncated (orig_dest/extra initialisation, the extra
# increment and the return are missing) -- verify against upstream.
749 def find_next_free (dest, too_many=100):
752 while os.path.exists(dest) and extra < too_many:
753 dest = orig_dest + '.' + repr(extra)
755 if extra >= too_many:
756 raise NoFreeFilenameError
759 ################################################################################
# Join a sequence with 'sep', substituting something (presumably "") for
# None entries -- the None branch and the list initialisation are missing.
# NOTE(review): copy truncated; the accumulator shadows the builtin 'list'
# -- verify against upstream dak utils.py.
761 def result_join (original, sep = '\t'):
763 for i in xrange(len(original)):
764 if original[i] == None:
767 list.append(original[i])
768 return sep.join(list)
770 ################################################################################
# Prefix every (optionally also blank) line of 'str' with 'prefix' and
# return the result with the trailing newline stripped.
# NOTE(review): copy truncated ('out' initialisation, the strip of each
# line and the return are missing) -- verify against upstream.
772 def prefix_multi_line_string(str, prefix, include_blank_lines=0):
774 for line in str.split('\n'):
776 if line or include_blank_lines:
777 out += "%s%s\n" % (prefix, line)
778 # Strip trailing new line
783 ################################################################################
# Map a .dak argument to its .changes file and validate readability,
# reacting per 'require_changes' (-1 ignore, 0 warn+None, 1 fatal).
# NOTE(review): copy truncated (embedded line numbers skip: error=""
# init, else branches and returns are missing) -- verify against upstream.
785 def validate_changes_file_arg(filename, require_changes=1):
786 """'filename' is either a .changes or .dak file. If 'filename' is a
787 .dak file, it's changed to be the corresponding .changes file. The
788 function then checks if the .changes file a) exists and b) is
789 readable and returns the .changes filename if so. If there's a
790 problem, the next action depends on the option 'require_changes'
793 o If 'require_changes' == -1, errors are ignored and the .changes
794 filename is returned.
795 o If 'require_changes' == 0, a warning is given and 'None' is returned.
796 o If 'require_changes' == 1, a fatal error is raised.
800 orig_filename = filename
801 if filename.endswith(".dak"):
802 filename = filename[:-4]+".changes"
804 if not filename.endswith(".changes"):
805 error = "invalid file type; not a changes file"
807 if not os.access(filename,os.R_OK):
808 if os.path.exists(filename):
809 error = "permission denied"
811 error = "file not found"
814 if require_changes == 1:
815 fubar("%s: %s." % (orig_filename, error))
816 elif require_changes == 0:
817 warn("Skipping %s - %s" % (orig_filename, error))
819 else: # We only care about the .dak file
824 ################################################################################
827 return (arch != "source" and arch != "all")
829 ################################################################################
def join_with_commas_and(list):
    """Join a list into an English enumeration, e.g. "a, b and c".

    Returns the string "nothing" for an empty list and the sole element
    for a single-element list.  The parameter keeps its historical name
    'list' (shadowing the builtin) so keyword callers are unaffected.
    """
    # Defect fixed: stray line-number residue baked into the source made
    # these lines unparseable; one-lined if-statements expanded for clarity.
    if len(list) == 0:
        return "nothing"
    if len(list) == 1:
        return list[0]
    return ", ".join(list[:-1]) + " and " + list[-1]
836 ################################################################################
841 (pkg, version, constraint) = atom
843 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
846 pp_deps.append(pp_dep)
847 return " |".join(pp_deps)
849 ################################################################################
854 ################################################################################
856 # Handle -a, -c and -s arguments; returns them as SQL constraints
# Translate -s/-c/-a style Options into SQL "AND ... IN (...)" fragments
# for suites, components and architectures; returns the three constraint
# strings plus a check_source flag.
# NOTE(review): copy truncated (embedded line numbers skip: con_suites/
# suite_ids_list/check_source initialisation, several if/else lines and
# the -1 suite_id check are missing) -- verify against upstream.
857 def parse_args(Options):
861 for suite in split_args(Options["Suite"]):
862 suite_id = database.get_suite_id(suite)
864 warn("suite '%s' not recognised." % (suite))
866 suite_ids_list.append(suite_id)
868 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
870 fubar("No valid suite given.")
875 if Options["Component"]:
876 component_ids_list = []
877 for component in split_args(Options["Component"]):
878 component_id = database.get_component_id(component)
879 if component_id == -1:
880 warn("component '%s' not recognised." % (component))
882 component_ids_list.append(component_id)
883 if component_ids_list:
884 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
886 fubar("No valid component given.")
890 # Process architecture
891 con_architectures = ""
892 if Options["Architecture"]:
895 for architecture in split_args(Options["Architecture"]):
896 if architecture == "source":
899 architecture_id = database.get_architecture_id(architecture)
900 if architecture_id == -1:
901 warn("architecture '%s' not recognised." % (architecture))
903 arch_ids_list.append(architecture_id)
905 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
908 fubar("No valid architecture given.")
912 return (con_suites, con_architectures, con_components, check_source)
914 ################################################################################
916 # Inspired(tm) by Bryn Keller's print_exc_plus (See
917 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
920 tb = sys.exc_info()[2]
929 traceback.print_exc()
931 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
932 frame.f_code.co_filename,
934 for key, value in frame.f_locals.items():
935 print "\t%20s = " % key,
939 print "<unable to print>"
941 ################################################################################
# Run 'function' and, on exception, dump an extended traceback (the body
# is entirely missing from this copy -- restore from upstream dak utils.py).
943 def try_with_debug(function):
951 ################################################################################
953 # Function for use in sorting lists of architectures.
954 # Sorts normally except that 'source' dominates all others.
# NOTE(review): copy truncated -- only the first comparison survives; the
# remaining branches and returns are missing. Verify against upstream.
956 def arch_compare_sw (a, b):
957 if a == "source" and b == "source":
966 ################################################################################
968 # Split command line arguments which can be separated by either commas
969 # or whitespace. If dwim is set, it will complain about string ending
970 # in comma since this usually means someone did 'dak ls -a i386, m68k
971 # foo' or something and the inevitable confusion resulting from 'm68k'
972 # being treated as an argument is undesirable.
# NOTE(review): copy truncated (the return statements for the whitespace
# and comma split paths are missing) -- verify against upstream.
974 def split_args (s, dwim=1):
975 if s.find(",") == -1:
978 if s[-1:] == "," and dwim:
979 fubar("split_args: found trailing comma, spurious space maybe?")
982 ################################################################################
def Dict(**kwargs):
    """Convenience dict constructor: Dict(a=1, b=2) -> {'a': 1, 'b': 2}.

    Callers pass only keyword arguments, so renaming the catch-all (which
    previously shadowed the builtin 'dict') cannot affect any call site.
    """
    # Defect fixed: stray line-number residue baked into the source, and the
    # **kwargs name shadowed the 'dict' builtin.
    return kwargs
986 ########################################
988 # Our very own version of commands.getouputstatus(), hacked to support
# Fork '/bin/sh -c cmd' with extra pipes so gpgv's --status-fd output
# (written to status_write) can be captured alongside stdout/stderr; the
# select() loop multiplexes the three read ends until EOF.
# NOTE(review): heavily truncated copy (embedded line numbers skip: the
# fork, fd closing loop, child/parent branches, read dispatch and the
# status accumulation are missing) -- restore from upstream dak utils.py
# before trusting this control flow.
990 def gpgv_get_status_output(cmd, status_read, status_write):
991 cmd = ['/bin/sh', '-c', cmd]
992 p2cread, p2cwrite = os.pipe()
993 c2pread, c2pwrite = os.pipe()
994 errout, errin = os.pipe()
1004 for i in range(3, 256):
1005 if i != status_write:
1011 os.execvp(cmd[0], cmd)
1017 os.dup2(c2pread, c2pwrite)
1018 os.dup2(errout, errin)
1020 output = status = ""
1022 i, o, e = select.select([c2pwrite, errin, status_read], [], [])
1025 r = os.read(fd, 8196)
1027 more_data.append(fd)
1028 if fd == c2pwrite or fd == errin:
1030 elif fd == status_read:
1033 fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
1035 pid, exit_status = os.waitpid(pid, 0)
1037 os.close(status_write)
1038 os.close(status_read)
1048 return output, status, exit_status
1050 ################################################################################
# Parse gpgv --status-fd text into {keyword: args} plus an accumulated
# internal_error string for malformed or duplicated status lines
# (NODATA/SIGEXPIRED/KEYEXPIRED may legitimately repeat).
# NOTE(review): copy truncated (keywords/internal_error initialisation,
# blank-line skip, continue statements and args slicing are missing) --
# verify against upstream dak utils.py.
1052 def process_gpgv_output(status):
1053 # Process the status-fd output
1056 for line in status.split('\n'):
1060 split = line.split()
1062 internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
1064 (gnupg, keyword) = split[:2]
1065 if gnupg != "[GNUPG:]":
1066 internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
1069 if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
1070 internal_error += "found duplicate status token ('%s').\n" % (keyword)
1073 keywords[keyword] = args
1075 return (keywords, internal_error)
1077 ################################################################################
# Run gpgv against an empty keyring to learn the NO_PUBKEY fingerprint of
# the signature on 'filename', then gpg --recv-key it from the keyserver
# into 'keyring'. Returns an error string on failure; success return is
# among the missing lines.
# NOTE(review): copy truncated (embedded line numbers skip: the keyserver/
# keyring default guards, internal_error check and success return are
# missing) -- verify against upstream dak utils.py.
1079 def retrieve_key (filename, keyserver=None, keyring=None):
1080 """Retrieve the key that signed 'filename' from 'keyserver' and
1081 add it to 'keyring'. Returns nothing on success, or an error message
1084 # Defaults for keyserver and keyring
1086 keyserver = Cnf["Dinstall::KeyServer"]
1088 keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
1090 # Ensure the filename contains no shell meta-characters or other badness
1091 if not re_taint_free.match(filename):
1092 return "%s: tainted filename" % (filename)
1094 # Invoke gpgv on the file
1095 status_read, status_write = os.pipe();
1096 cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1097 (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1099 # Process the status-fd output
1100 (keywords, internal_error) = process_gpgv_output(status)
1102 return internal_error
1104 if not keywords.has_key("NO_PUBKEY"):
1105 return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1107 fingerprint = keywords["NO_PUBKEY"][0]
1108 # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
1109 # it'll try to create a lockfile in /dev. A better solution might
1110 # be a tempfile or something.
1111 cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1112 % (Cnf["Dinstall::SigningKeyring"])
1113 cmd += " --keyring %s --keyserver %s --recv-key %s" \
1114 % (keyring, keyserver, fingerprint)
1115 (result, output) = commands.getstatusoutput(cmd)
1117 return "'%s' failed with exit code %s" % (cmd, result)
1121 ################################################################################
# Build the "--keyring X --keyring Y ..." argument string for gpg/gpgv,
# defaulting to the configured Dinstall::GPGKeyring list.
# NOTE(review): copy truncated (the 'if not keyrings:' guard around the
# default assignment is missing) -- verify against upstream.
1123 def gpg_keyring_args(keyrings=None):
1125 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1127 return " ".join(["--keyring %s" % x for x in keyrings])
1129 ################################################################################
# Validate the (possibly detached) PGP signature on sig_filename with gpgv,
# funnelling every failure mode through the caller-supplied reject()
# callback and returning the signing fingerprint on success (the success
# return is among the missing lines). Status keywords follow GnuPG's
# --status-fd protocol (BADSIG, NO_PUBKEY, VALIDSIG, ...).
# NOTE(review): heavily truncated copy (embedded line numbers skip: many
# early 'return None' lines, 'bad = True' style bookkeeping, the keyrings
# default guard and the final return are missing) -- restore from
# upstream dak utils.py before trusting or editing this control flow.
1131 def check_signature (sig_filename, reject, data_filename="", keyrings=None, autofetch=None):
1132 """Check the signature of a file and return the fingerprint if the
1133 signature is valid or 'None' if it's not. The first argument is the
1134 filename whose signature should be checked. The second argument is a
1135 reject function and is called when an error is found. The reject()
1136 function must allow for two arguments: the first is the error message,
1137 the second is an optional prefix string. It's possible for reject()
1138 to be called more than once during an invocation of check_signature().
1139 The third argument is optional and is the name of the files the
1140 detached signature applies to. The fourth argument is optional and is
1141 a *list* of keyrings to use. 'autofetch' can either be None, True or
1142 False. If None, the default behaviour specified in the config will be
1145 # Ensure the filename contains no shell meta-characters or other badness
1146 if not re_taint_free.match(sig_filename):
1147 reject("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
1150 if data_filename and not re_taint_free.match(data_filename):
1151 reject("!!WARNING!! tainted data filename: '%s'." % (data_filename))
1155 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1157 # Autofetch the signing key if that's enabled
1158 if autofetch == None:
1159 autofetch = Cnf.get("Dinstall::KeyAutoFetch")
1161 error_msg = retrieve_key(sig_filename)
1166 # Build the command line
1167 status_read, status_write = os.pipe();
1168 cmd = "gpgv --status-fd %s %s %s %s" % (
1169 status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)
1171 # Invoke gpgv on the file
1172 (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)
1174 # Process the status-fd output
1175 (keywords, internal_error) = process_gpgv_output(status)
1177 # If we failed to parse the status-fd output, let's just whine and bail now
1179 reject("internal error while performing signature check on %s." % (sig_filename))
1180 reject(internal_error, "")
1181 reject("Please report the above errors to the Archive maintainers by replying to this mail.", "")
1185 # Now check for obviously bad things in the processed output
1186 if keywords.has_key("KEYREVOKED"):
1187 reject("The key used to sign %s has been revoked." % (sig_filename))
1189 if keywords.has_key("BADSIG"):
1190 reject("bad signature on %s." % (sig_filename))
1192 if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
1193 reject("failed to check signature on %s." % (sig_filename))
1195 if keywords.has_key("NO_PUBKEY"):
1196 args = keywords["NO_PUBKEY"]
1199 reject("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
1201 if keywords.has_key("BADARMOR"):
1202 reject("ASCII armour of signature was corrupt in %s." % (sig_filename))
1204 if keywords.has_key("NODATA"):
1205 reject("no signature found in %s." % (sig_filename))
1207 if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
1208 args = keywords["KEYEXPIRED"]
1211 reject("The key (0x%s) used to sign %s has expired." % (key, sig_filename))
1217 # Next check gpgv exited with a zero return code
1219 reject("gpgv failed while checking %s." % (sig_filename))
1221 reject(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
1223 reject(prefix_multi_line_string(output, " [GPG output:] "), "")
1226 # Sanity check the good stuff we expect
1227 if not keywords.has_key("VALIDSIG"):
1228 reject("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
1231 args = keywords["VALIDSIG"]
1233 reject("internal error while checking signature on %s." % (sig_filename))
1236 fingerprint = args[0]
1237 if not keywords.has_key("GOODSIG"):
1238 reject("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
1240 if not keywords.has_key("SIG_ID"):
1241 reject("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))
1244 # Finally ensure there's not something we don't recognise
1245 known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
1246 SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
1247 NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")
1249 for keyword in keywords.keys():
1250 if not known_keywords.has_key(keyword):
1251 reject("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
1259 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """Retrieve email addresses from gpg key uids for a given fingerprint."""
    # Serve repeat lookups from the module-level cache; 'addresses' is None
    # only on a cache miss.  NOTE(review): the early-return branch body and
    # the initialisation of the fresh result set are not visible in this
    # excerpt — confirm against the full file.
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses != None:
    # Cache miss: ask gpg to print the key; its 'uid' lines carry the
    # e-mail addresses we are after (matched by re_gpg_uid).
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
          % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    # Pull the '<address>' capture group out of every matching 'uid' line.
    for l in output.split('\n'):
        m = re_gpg_uid.match(l)
            addresses.add(m.group(1))
    # Remember the result so later calls skip the gpg invocation entirely.
    key_uid_email_cache[fingerprint] = addresses
1278 ################################################################################
# Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    # Word-wrap 'paragraph' to at most 'max_length' columns, prefixing each
    # emitted line with 'prefix'.
    # NOTE(review): several lines of this function (initialisation of 's' and
    # 'line', the per-word loop header and the final return) are missing from
    # this excerpt; comments below describe only what is visible.
    words = paragraph.split()
    # Presumably iterating 'for word in words:' here (header not visible).
    word_size = len(word)
    if word_size > max_length:
        # Oversized word: flush whatever is on the current line, then give
        # the long word a line of its own rather than splitting it.
        s += line + '\n' + prefix
        s += word + '\n' + prefix
    # '+ 1' accounts for the separating space before the next word.
    new_length = len(line) + word_size + 1
    if new_length > max_length:
        # Appending the word would overflow: flush the current line first.
        s += line + '\n' + prefix
1311 ################################################################################
1313 # Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
1314 # Returns fixed 'src'
def clean_symlink (src, dest, root):
    """Relativize the absolute symlink 'src' -> 'dest' with respect to
    'root' and return the fixed (relative) 'src' path."""
    # Strip the leading archive root off both endpoints (first match only).
    stripped_src = src.replace(root, '', 1)
    stripped_dest = dest.replace(root, '', 1)
    # The link itself lives in dest's directory; emit one '..' for every
    # component of that directory to climb back up to the root, then
    # descend into src.
    link_dir = os.path.dirname(stripped_dest)
    climb = '/'.join('..' for _ in link_dir.split('/')) + '/'
    return climb + stripped_src
1322 ################################################################################
def temp_filename(directory=None, dotprefix=None, perms=0700):
    """Return a secure and unique filename by pre-creating it.
    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'dotprefix' is non-null, the filename will be prefixed with a '.'."""
    # Point the tempfile module at the requested directory, remembering the
    # old module-global setting so it can be restored afterwards.
    # NOTE(review): the guards around these statements, the os.close() of
    # 'fd' and the final return are not visible in this excerpt — confirm
    # against the full file.
    old_tempdir = tempfile.tempdir
    tempfile.tempdir = directory
    filename = tempfile.mktemp()
    # Turn 'dir/name' into 'dir/.name' when a dot prefix was requested.
    filename = "%s/.%s" % (os.path.dirname(filename), os.path.basename(filename))
    # O_CREAT|O_EXCL closes the classic mktemp() race: the open fails rather
    # than reusing a file somebody else created in the window.
    fd = os.open(filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, perms)
    # Restore the module-global tempdir for other tempfile users.
    tempfile.tempdir = old_tempdir
1345 ################################################################################
# checks if the user part of the email is listed in the alias file
def is_email_alias(email):
    # Lazily populate the module-level cache with every alias name (the part
    # before the ':' on each line of the alias file).
    # NOTE(review): the initialisation of 'alias_cache' to an empty set (and
    # any 'global' declaration) is not visible in this excerpt — confirm
    # against the full file.
    if alias_cache == None:
        aliasfn = which_alias_file()
        for l in open(aliasfn):
            alias_cache.add(l.split(':')[0])
    # Compare only the local part (before '@') against the alias names.
    uid = email.split('@')[0]
    return uid in alias_cache
1360 ################################################################################
# Module-level initialisation: build the global apt configuration object and
# load the shipped default configuration file into it.
Cnf = apt_pkg.newConfiguration()
apt_pkg.ReadConfigFileISC(Cnf,default_config)
# Layer the site-specific configuration on top, but only when it is a
# different file from the default (avoids reading the same file twice).
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1370 ################################################################################