2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
40 from dak_exceptions import *
41 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
42 re_multi_line_field, re_srchasver, re_verwithext, \
43 re_parse_maintainer, re_taint_free, re_gpg_uid
45 ################################################################################
# Module-level configuration defaults and lookup caches used throughout dak.
47 default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
48 default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used
50 alias_cache = None #: Cache for email alias checks
51 key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
53 # (hashname, function, earliest_changes_version)
54 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
55 ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
57 ################################################################################
# Tail of html_escape(s) -- its def line is elided in this view.  Replaces
# HTML-special characters in s via the html_escaping lookup table.
60 """ Escape html chars """
61 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
63 ################################################################################
# Open a file, re-raising IOError as the project's CantOpenError.
65 def open_file(filename, mode='r'):
67 Open C{file}, return fileobject.
69 @type filename: string
70 @param filename: path/filename to open
73 @param mode: open mode
76 @return: open fileobject
78 @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
# NOTE(review): the try/except IOError wrapper lines are elided in this view.
82 f = open(filename, mode)
84 raise CantOpenError, filename
87 ################################################################################
# Prompt on stdout, read one line from stdin; on EOF (^D) reports the
# interrupt on stderr (remaining handling elided in this view).
89 def our_raw_input(prompt=""):
91 sys.stdout.write(prompt)
97 sys.stderr.write("\nUser interrupt (^D).\n")
100 ################################################################################
# Split a Debian "component/section" Section value into its parts; a bare
# section falls back to a default component looked up in Cnf.
102 def extract_component_from_section(section):
105 if section.find('/') != -1:
106 component = section.split('/')[0]
108 # Expand default component
110 if Cnf.has_key("Component::%s" % section):
115 return (section, component)
117 ################################################################################
# Parse RFC822-ish deb822 content (a .changes or .dsc body) into a dict of
# lower-cased field -> value.  signing_rules selects how strictly the inline
# PGP armour must be laid out (see parse_changes below for the contract).
# NOTE(review): many lines of this parser are elided in this view; the code
# shown is kept byte-identical.
119 def parse_deb822(contents, signing_rules=0):
123 # Split the lines in the input, keeping the linebreaks.
124 lines = contents.splitlines(True)
127 raise ParseChangesError, "[Empty changes file]"
129 # Reindex by line number so we can easily verify the format of
135 indexed_lines[index] = line[:-1]
139 num_of_lines = len(indexed_lines.keys())
142 while index < num_of_lines:
144 line = indexed_lines[index]
# Strict mode: a blank line inside the signed data must be immediately
# followed by the PGP SIGNATURE armour, else the .dsc is invalid.
146 if signing_rules == 1:
148 if index > num_of_lines:
149 raise InvalidDscError, index
150 line = indexed_lines[index]
151 if not line.startswith("-----BEGIN PGP SIGNATURE"):
152 raise InvalidDscError, index
157 if line.startswith("-----BEGIN PGP SIGNATURE"):
159 if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
# In strict mode skip the armour headers up to the first blank line.
161 if signing_rules == 1:
162 while index < num_of_lines and line != "":
164 line = indexed_lines[index]
166 # If we're not inside the signed data, don't process anything
167 if signing_rules >= 0 and not inside_signature:
# Single-line "Field: value" match.
169 slf = re_single_line_field.match(line)
171 field = slf.groups()[0].lower()
172 changes[field] = slf.groups()[1]
176 changes[field] += '\n'
# Continuation line of a multi-line field.
178 mlf = re_multi_line_field.match(line)
181 raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
182 if first == 1 and changes[field] != "":
183 changes[field] += '\n'
185 changes[field] += mlf.groups()[0] + '\n'
# Reaching EOF while still inside the signature is an error in strict mode.
189 if signing_rules == 1 and inside_signature:
190 raise InvalidDscError, index
192 changes["filecontents"] = "".join(lines)
194 if changes.has_key("source"):
195 # Strip the source version in brackets from the source field,
196 # put it in the "source-version" field instead.
197 srcver = re_srchasver.search(changes["source"])
199 changes["source"] = srcver.group(1)
200 changes["source-version"] = srcver.group(2)
203 raise ParseChangesError, error
207 ################################################################################
# Thin wrapper: read a .changes file from disk and hand its contents to
# parse_deb822 with the same signing_rules contract.
209 def parse_changes(filename, signing_rules=0):
211 Parses a changes file and returns a dictionary where each field is a
212 key. The mandatory first argument is the filename of the .changes
215 signing_rules is an optional argument:
217 - If signing_rules == -1, no signature is required.
218 - If signing_rules == 0 (the default), a signature is required.
219 - If signing_rules == 1, it turns on the same strict format checking
222 The rules for (signing_rules == 1)-mode are:
224 - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
225 followed by any PGP header data and must end with a blank line.
227 - The data section must end with a blank line and must be followed by
228 "-----BEGIN PGP SIGNATURE-----".
231 changes_in = open_file(filename)
232 content = changes_in.read()
234 return parse_deb822(content, signing_rules)
236 ################################################################################
def hash_key(hashname):
    """ Return the files-dict key under which the named hash is stored
        (e.g. 'md5' -> 'md5sum'). """
    return hashname + "sum"
241 ################################################################################
# Compute the named hash for every file in 'files' and store it under
# hash_key(hashname); unopenable files are reported in the returned reject list.
243 def create_hash(where, files, hashname, hashfunc):
245 create_hash extends the passed files dict with the given hash by
246 iterating over all files on disk and passing them to the hashing
251 for f in files.keys():
253 file_handle = open_file(f)
254 except CantOpenError:
255 rejmsg.append("Could not open file %s for checksumming" % (f))
257 files[f][hash_key(hashname)] = hashfunc(file_handle)
262 ################################################################################
# Verify the stored hash of every file in 'files' against the file on disk.
# Returns a list of reject messages; does not modify 'files'.
264 def check_hash(where, files, hashname, hashfunc):
266 check_hash checks the given hash in the files dict against the actual
267 files on disk. The hash values need to be present consistently in
268 all file entries. It does not modify its input in any way.
272 for f in files.keys():
276 file_handle = open_file(f)
278 # Check for the hash entry, to not trigger a KeyError.
279 if not files[f].has_key(hash_key(hashname)):
280 rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
284 # Actually check the hash for correctness.
285 if hashfunc(file_handle) != files[f][hash_key(hashname)]:
286 rejmsg.append("%s: %s check failed in %s" % (f, hashname,
288 except CantOpenError:
289 # TODO: This happens when the file is in the pool.
290 # warn("Cannot open file %s" % f)
297 ################################################################################
# Compare each entry's declared "size" against the on-disk size (os.stat).
299 def check_size(where, files):
301 check_size checks the file sizes in the passed files dict against the
306 for f in files.keys():
311 # TODO: This happens when the file is in the pool.
315 actual_size = entry[stat.ST_SIZE]
316 size = int(files[f]["size"])
317 if size != actual_size:
318 rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
319 % (f, actual_size, size, where))
322 ################################################################################
# Reject any "checksums-*" field naming a hash we do not know (known_hashes).
324 def check_hash_fields(what, manifest):
326 check_hash_fields ensures that there are no checksum fields in the
327 given dict that we do not know about.
331 hashes = map(lambda x: x[0], known_hashes)
332 for field in manifest:
333 if field.startswith("checksums-"):
334 hashname = field.split("-",1)[1]
335 if hashname not in hashes:
336 rejmsg.append("Unsupported checksum field for %s "\
337 "in %s" % (hashname, what))
340 ################################################################################
# For a .changes whose Format is new enough ('format >= version'), import the
# declared checksums; for older formats, fall back to computing them (the
# fallback branch lines are elided in this view).
342 def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
343 if format >= version:
344 # The version should contain the specified hash.
347 # Import hashes from the changes
348 rejmsg = parse_checksums(".changes", files, changes, hashname)
352 # We need to calculate the hash because it can't possibly
355 return func(".changes", files, hashname, hashfunc)
357 # We could add the orig which might be in the pool to the files dict to
358 # access the checksums easily.
360 def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
362 ensure_dsc_hashes' task is to ensure that each and every *present* hash
363 in the dsc is correct, i.e. identical to the changes file and if necessary
364 the pool. The latter task is delegated to check_hash.
368 if not dsc.has_key('Checksums-%s' % (hashname,)):
370 # Import hashes from the dsc
371 parse_checksums(".dsc", dsc_files, dsc, hashname)
373 rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
376 ################################################################################
# Top-level driver: validate/import every known hash for both the .changes
# files and (for source uploads) the .dsc files.  Returns reject messages.
378 def ensure_hashes(changes, dsc, files, dsc_files):
381 # Make sure we recognise the format of the Files: field in the .changes
382 format = changes.get("format", "0.0").split(".", 1)
384 format = int(format[0]), int(format[1])
386 format = int(float(format[0])), 0
388 # We need to deal with the original changes blob, as the fields we need
389 # might not be in the changes dict serialised into the .dak anymore.
390 orig_changes = parse_deb822(changes['filecontents'])
392 # Copy the checksums over to the current changes dict. This will keep
393 # the existing modifications to it intact.
394 for field in orig_changes:
395 if field.startswith('checksums-'):
396 changes[field] = orig_changes[field]
398 # Check for unsupported hashes
399 rejmsg.extend(check_hash_fields(".changes", changes))
400 rejmsg.extend(check_hash_fields(".dsc", dsc))
402 # We have to calculate the hash if we have an earlier changes version than
403 # the hash appears in rather than require it exist in the changes file
404 for hashname, hashfunc, version in known_hashes:
405 rejmsg.extend(_ensure_changes_hash(changes, format, version, files,
407 if "source" in changes["architecture"]:
408 rejmsg.extend(_ensure_dsc_hash(dsc, dsc_files, hashname,
# Parse a "Checksums-<hash>" manifest field ("checksum size filename" per
# line) into the files dict, cross-checking sizes, and complain about files
# that have no checksum entry at all.
413 def parse_checksums(where, files, manifest, hashname):
415 field = 'checksums-%s' % hashname
416 if not field in manifest:
418 for line in manifest[field].split('\n'):
421 checksum, size, checkfile = line.strip().split(' ')
422 if not files.has_key(checkfile):
423 # TODO: check for the file's entry in the original files dict, not
424 # the one modified by (auto)byhand and other weird stuff
425 # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
426 # (file, hashname, where))
# NOTE(review): sizes are compared as strings here -- both sides come from
# field text, so this matches only if formatting is identical.
428 if not files[checkfile]["size"] == size:
429 rejmsg.append("%s: size differs for files and checksums-%s entry "\
430 "in %s" % (checkfile, hashname, where))
432 files[checkfile][hash_key(hashname)] = checksum
433 for f in files.keys():
434 if not files[f].has_key(hash_key(hashname)):
# NOTE(review): this message interpolates 'checkfile' (the last line parsed
# above) rather than 'f', so it likely names the wrong file -- confirm and
# change to 'f' in a behavior-changing fix.
435 rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
439 ################################################################################
441 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
# Parse a Files:/Checksums-* style field from a .changes or .dsc into a dict
# keyed by filename.  .changes entries carry section/priority; .dsc entries
# do not.  Accepted Format versions differ for the two cases.
443 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
446 # Make sure we have a Files: field to parse...
447 if not changes.has_key(field):
448 raise NoFilesFieldError
450 # Make sure we recognise the format of the Files: field
451 format = re_verwithext.search(changes.get("format", "0.0"))
453 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
455 format = format.groups()
456 if format[1] == None:
457 format = int(float(format[0])), 0, format[2]
459 format = int(format[0]), int(format[1]), format[2]
460 if format[2] == None:
464 # format = (1,0) are the only formats we currently accept,
465 # format = (0,0) are missing format headers of which we still
466 # have some in the archive.
467 if format != (1,0) and format != (0,0):
468 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
# NOTE(review): the branch structure separating the .changes checks above
# from the .dsc checks below is elided in this view.
470 if (format < (1,5) or format > (1,8)):
471 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
472 if field != "files" and format < (1,8):
473 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
475 includes_section = (not is_a_dsc) and field == "files"
477 # Parse each entry/line:
478 for i in changes[field].split('\n'):
482 section = priority = ""
485 (md5, size, section, priority, name) = s
487 (md5, size, name) = s
489 raise ParseChangesError, i
496 (section, component) = extract_component_from_section(section)
498 files[name] = Dict(size=size, section=section,
499 priority=priority, component=component)
500 files[name][hashname] = md5
504 ################################################################################
# Return s encoded as UTF-8; a string that fails UTF-8 decoding is treated
# as ISO-8859-1 (the success path lines are elided in this view).
506 def force_to_utf8(s):
508 Forces a string to UTF-8. If the string isn't already UTF-8,
509 it's assumed to be ISO-8859-1.
515 latin1_s = unicode(s,'iso8859-1')
516 return latin1_s.encode('utf-8')
# RFC2047-encode a header value only when it is not plain ASCII, trying
# UTF-8 first and falling back to ISO-8859-1 (try/except lines elided).
518 def rfc2047_encode(s):
520 Encodes a (header) string per RFC2047 if necessary. If the
521 string is neither ASCII nor UTF-8, it's assumed to be ISO-8859-1.
524 codecs.lookup('ascii')[1](s)
529 codecs.lookup('utf-8')[1](s)
530 h = email.Header.Header(s, 'utf-8', 998)
533 h = email.Header.Header(s, 'iso-8859-1', 998)
536 ################################################################################
538 # <Culus> 'The standard sucks, but my tool is supposed to interoperate
539 # with it. I know - I'll fix the suckage and make things
# Normalise a Maintainer/Changed-By value into four forms: RFC822 string,
# RFC2047-encoded string, bare name, bare email.
542 def fix_maintainer (maintainer):
544 Parses a Maintainer or Changed-By field and returns:
545 1. an RFC822 compatible version,
546 2. an RFC2047 compatible version,
550 The name is forced to UTF-8 for both 1. and 3.. If the name field
551 contains '.' or ',' (as allowed by Debian policy), 1. and 2. are
552 switched to 'email (name)' format.
555 maintainer = maintainer.strip()
# Empty input yields four empty strings.
557 return ('', '', '', '')
559 if maintainer.find("<") == -1:
# Bare "<email>" form: strip the angle brackets, no name part.
562 elif (maintainer[0] == "<" and maintainer[-1:] == ">"):
563 email = maintainer[1:-1]
# Full "Name <email>" form: delegate splitting to re_parse_maintainer.
566 m = re_parse_maintainer.match(maintainer)
568 raise ParseMaintError, "Doesn't parse as a valid Maintainer field."
572 # Get an RFC2047 compliant version of the name
573 rfc2047_name = rfc2047_encode(name)
575 # Force the name to be UTF-8
576 name = force_to_utf8(name)
# Names containing ',' or '.' must use the "email (name)" comment form to
# stay RFC822-parsable.
578 if name.find(',') != -1 or name.find('.') != -1:
579 rfc822_maint = "%s (%s)" % (email, name)
580 rfc2047_maint = "%s (%s)" % (email, rfc2047_name)
582 rfc822_maint = "%s <%s>" % (name, email)
583 rfc2047_maint = "%s <%s>" % (rfc2047_name, email)
# buildd_* pseudo-addresses are exempt from the '@' sanity check.
585 if email.find("@") == -1 and email.find("buildd_") != 0:
586 raise ParseMaintError, "No @ found in email address part."
588 return (rfc822_maint, rfc2047_maint, name, email)
590 ################################################################################
# Pipe a message (string or existing file) through the configured sendmail
# command; raises SendmailFailedError on a non-zero exit.
592 def send_mail (message, filename=""):
593 """sendmail wrapper, takes _either_ a message string or a file as arguments"""
595 # If we've been passed a string dump it into a temporary file
597 (fd, filename) = tempfile.mkstemp()
598 os.write (fd, message)
# Invoke sendmail with the (possibly temporary) file on stdin.
602 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
604 raise SendmailFailedError, output
606 # Clean up any temporary files
610 ################################################################################
# Map a source package name to its pool subdirectory: lib* packages pool
# under "lib<x>/", everything else under "<first letter>/".
612 def poolify (source, component):
615 if source[:3] == "lib":
616 return component + source[:4] + '/' + source + '/'
618 return component + source[:1] + '/' + source + '/'
620 ################################################################################
# Move src to dest (file or directory), creating missing parent directories
# mode 02775, refusing to clobber unless 'overwrite' is set; the final
# chmod applies 'perms'.  The removal of src is elided in this view.
622 def move (src, dest, overwrite = 0, perms = 0664):
623 if os.path.exists(dest) and os.path.isdir(dest):
626 dest_dir = os.path.dirname(dest)
627 if not os.path.exists(dest_dir):
628 umask = os.umask(00000)
629 os.makedirs(dest_dir, 02775)
631 #print "Moving %s to %s..." % (src, dest)
632 if os.path.exists(dest) and os.path.isdir(dest):
633 dest += '/' + os.path.basename(src)
634 # Don't overwrite unless forced to
635 if os.path.exists(dest):
637 fubar("Can't move %s to %s - file already exists." % (src, dest))
639 if not os.access(dest, os.W_OK):
640 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
641 shutil.copy2(src, dest)
642 os.chmod(dest, perms)
# Same as move() but leaves src in place and raises exceptions instead of
# calling fubar() on conflicts.
645 def copy (src, dest, overwrite = 0, perms = 0664):
646 if os.path.exists(dest) and os.path.isdir(dest):
649 dest_dir = os.path.dirname(dest)
650 if not os.path.exists(dest_dir):
651 umask = os.umask(00000)
652 os.makedirs(dest_dir, 02775)
654 #print "Copying %s to %s..." % (src, dest)
655 if os.path.exists(dest) and os.path.isdir(dest):
656 dest += '/' + os.path.basename(src)
657 # Don't overwrite unless forced to
658 if os.path.exists(dest):
660 raise FileExistsError
662 if not os.access(dest, os.W_OK):
663 raise CantOverwriteError
664 shutil.copy2(src, dest)
665 os.chmod(dest, perms)
667 ################################################################################
# Tail of where_am_i() -- its def line is elided.  Resolves the local FQDN
# and prefers a per-host Config::<host>::DatabaseHostname override.
670 res = socket.gethostbyaddr(socket.gethostname())
671 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
672 if database_hostname:
673 return database_hostname
# Return the per-host dak config path if configured, else the default.
677 def which_conf_file ():
678 res = socket.gethostbyaddr(socket.gethostname())
679 if Cnf.get("Config::" + res[0] + "::DakConfig"):
680 return Cnf["Config::" + res[0] + "::DakConfig"]
682 return default_config
# Return the per-host apt config path if configured, else the default.
684 def which_apt_conf_file ():
685 res = socket.gethostbyaddr(socket.gethostname())
686 if Cnf.get("Config::" + res[0] + "::AptConfig"):
687 return Cnf["Config::" + res[0] + "::AptConfig"]
689 return default_apt_config
# Return the host's forward-alias file path when it exists (the miss branch
# is elided in this view).
691 def which_alias_file():
692 hostname = socket.gethostbyaddr(socket.gethostname())[0]
693 aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
694 if os.path.exists(aliasfn):
699 ################################################################################
701 # Escape characters which have meaning to SQL's regex comparison operator ('~')
702 # (woefully incomplete)
# Body of the SQL-regex escaping helper -- its def line is elided in this view.
705 s = s.replace('+', '\\\\+')
706 s = s.replace('.', '\\\\.')
709 ################################################################################
# Read a template file and replace every key of 'map' found in it with the
# corresponding value (the loop header and return are elided in this view).
711 def TemplateSubst(map, filename):
712 """ Perform a substition of template """
713 templatefile = open_file(filename)
714 template = templatefile.read()
716 template = template.replace(x,map[x])
720 ################################################################################
# Print a fatal error to stderr; exiting with exit_code follows (elided).
722 def fubar(msg, exit_code=1):
723 sys.stderr.write("E: %s\n" % (msg))
# Body of warn(msg) -- its def line is elided; writes a warning to stderr.
727 sys.stderr.write("W: %s\n" % (msg))
729 ################################################################################
731 # Returns the user name with a laughable attempt at rfc822 conformancy
732 # (read: removing stray periods).
# Body of whoami() -- its def line is elided; takes the GECOS full name.
734 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
736 ################################################################################
# Tail of size_type() -- formats a count with its unit suffix; the scaling
# logic above this return is elided in this view.
746 return ("%d%s" % (c, t))
748 ################################################################################
# Rewrite the parsed "architecture" string field into a dict keyed by each
# architecture token (used below by changes_compare).
750 def cc_fix_changes (changes):
751 o = changes.get("architecture", "")
753 del changes["architecture"]
754 changes["architecture"] = {}
756 changes["architecture"][j] = 1
# cmp()-style comparator for two .changes filenames; unparsable files fall
# back to filename ordering (the except branches are elided in this view).
758 def changes_compare (a, b):
759 """ Sort by source name, source version, 'have source', and then by filename """
761 a_changes = parse_changes(a)
766 b_changes = parse_changes(b)
770 cc_fix_changes (a_changes)
771 cc_fix_changes (b_changes)
773 # Sort by source name
774 a_source = a_changes.get("source")
775 b_source = b_changes.get("source")
776 q = cmp (a_source, b_source)
780 # Sort by source version
781 a_version = a_changes.get("version", "0")
782 b_version = b_changes.get("version", "0")
783 q = apt_pkg.VersionCompare(a_version, b_version)
787 # Sort by 'have source'
788 a_has_source = a_changes["architecture"].get("source")
789 b_has_source = b_changes["architecture"].get("source")
790 if a_has_source and not b_has_source:
792 elif b_has_source and not a_has_source:
795 # Fall back to sort by filename
798 ################################################################################
# Find a non-existing variant of 'dest' by appending '.1', '.2', ... up to
# 'too_many'; raises NoFreeFilenameError when exhausted.
800 def find_next_free (dest, too_many=100):
803 while os.path.exists(dest) and extra < too_many:
804 dest = orig_dest + '.' + repr(extra)
806 if extra >= too_many:
807 raise NoFreeFilenameError
810 ################################################################################
# Join a result row with 'sep', rendering None entries as empty strings.
812 def result_join (original, sep = '\t'):
814 for i in xrange(len(original)):
815 if original[i] == None:
816 resultlist.append("")
818 resultlist.append(original[i])
819 return sep.join(resultlist)
821 ################################################################################
# Prefix every (optionally also blank) line of 'str' with 'prefix'.
823 def prefix_multi_line_string(str, prefix, include_blank_lines=0):
825 for line in str.split('\n'):
827 if line or include_blank_lines:
828 out += "%s%s\n" % (prefix, line)
829 # Strip trailing new line
834 ################################################################################
# Map a .dak argument to its .changes twin and validate that the .changes
# file exists and is readable; failure handling is chosen by require_changes.
836 def validate_changes_file_arg(filename, require_changes=1):
838 'filename' is either a .changes or .dak file. If 'filename' is a
839 .dak file, it's changed to be the corresponding .changes file. The
840 function then checks if the .changes file a) exists and b) is
841 readable and returns the .changes filename if so. If there's a
842 problem, the next action depends on the option 'require_changes'
845 - If 'require_changes' == -1, errors are ignored and the .changes
846 filename is returned.
847 - If 'require_changes' == 0, a warning is given and 'None' is returned.
848 - If 'require_changes' == 1, a fatal error is raised.
853 orig_filename = filename
854 if filename.endswith(".dak"):
855 filename = filename[:-4]+".changes"
857 if not filename.endswith(".changes"):
858 error = "invalid file type; not a changes file"
860 if not os.access(filename,os.R_OK):
861 if os.path.exists(filename):
862 error = "permission denied"
864 error = "file not found"
867 if require_changes == 1:
868 fubar("%s: %s." % (orig_filename, error))
869 elif require_changes == 0:
870 warn("Skipping %s - %s" % (orig_filename, error))
872 else: # We only care about the .dak file
877 ################################################################################
# Body of the real-architecture predicate -- its def line is elided; true
# for any arch other than "source" and "all".
880 return (arch != "source" and arch != "all")
882 ################################################################################
def join_with_commas_and(list):
    """ Render a list as an English enumeration: '' -> 'nothing',
        ['a'] -> 'a', ['a','b','c'] -> 'a, b and c'. """
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    return "%s and %s" % (", ".join(list[:-1]), list[-1])
889 ################################################################################
# Fragment of the dependency pretty-printer -- its def line and loop header
# are elided.  Formats each (pkg, version, constraint) atom and joins the
# alternatives with " |".
894 (pkg, version, constraint) = atom
896 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
899 pp_deps.append(pp_dep)
900 return " |".join(pp_deps)
902 ################################################################################
907 ################################################################################
# Translate the -s/-c/-a command line options into SQL "AND ... IN (...)"
# constraint fragments by resolving each name to its database id.
909 def parse_args(Options):
910 """ Handle -a, -c and -s arguments; returns them as SQL constraints """
914 for suite in split_args(Options["Suite"]):
915 suite_id = database.get_suite_id(suite)
917 warn("suite '%s' not recognised." % (suite))
919 suite_ids_list.append(suite_id)
921 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
923 fubar("No valid suite given.")
928 if Options["Component"]:
929 component_ids_list = []
930 for component in split_args(Options["Component"]):
931 component_id = database.get_component_id(component)
932 if component_id == -1:
933 warn("component '%s' not recognised." % (component))
935 component_ids_list.append(component_id)
936 if component_ids_list:
937 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
939 fubar("No valid component given.")
943 # Process architecture
944 con_architectures = ""
945 if Options["Architecture"]:
948 for architecture in split_args(Options["Architecture"]):
# "source" is not a database architecture; it sets the check_source flag.
949 if architecture == "source":
952 architecture_id = database.get_architecture_id(architecture)
953 if architecture_id == -1:
954 warn("architecture '%s' not recognised." % (architecture))
956 arch_ids_list.append(architecture_id)
958 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
961 fubar("No valid architecture given.")
965 return (con_suites, con_architectures, con_components, check_source)
967 ################################################################################
969 # Inspired(tm) by Bryn Keller's print_exc_plus (See
970 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
# Fragment of the verbose traceback printer -- its def line is elided; walks
# the traceback frames and dumps each frame's locals.
973 tb = sys.exc_info()[2]
982 traceback.print_exc()
984 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
985 frame.f_code.co_filename,
987 for key, value in frame.f_locals.items():
988 print "\t%20s = " % key,
992 print "<unable to print>"
994 ################################################################################
# Header of try_with_debug(function) -- its body is elided in this view.
996 def try_with_debug(function):
1004 ################################################################################
# Comparator for architecture names; 'source' sorts before everything else
# (the remaining branches are elided in this view).
1006 def arch_compare_sw (a, b):
1008 Function for use in sorting lists of architectures.
1010 Sorts normally except that 'source' dominates all others.
1013 if a == "source" and b == "source":
1022 ################################################################################
# Split comma- or whitespace-separated command line arguments; a trailing
# comma is treated as a user error when dwim is set.
1024 def split_args (s, dwim=1):
1026 Split command line arguments which can be separated by either commas
1027 or whitespace. If dwim is set, it will complain about string ending
1028 in comma since this usually means someone did 'dak ls -a i386, m68k
1029 foo' or something and the inevitable confusion resulting from 'm68k'
1030 being treated as an argument is undesirable.
1033 if s.find(",") == -1:
1036 if s[-1:] == "," and dwim:
1037 fubar("split_args: found trailing comma, spurious space maybe?")
1040 ################################################################################
def Dict(**kwargs):
    """ Convenience constructor: return the keyword arguments as a dict. """
    return kwargs
1044 ########################################
# Run a command (gpgv) while multiplexing its stdout, stderr and a dedicated
# --status-fd pipe; returns (output, status, exit_status).  Many of the
# fork/pipe plumbing lines are elided in this view.
1046 def gpgv_get_status_output(cmd, status_read, status_write):
1048 Our very own version of commands.getouputstatus(), hacked to support
1052 cmd = ['/bin/sh', '-c', cmd]
1053 p2cread, p2cwrite = os.pipe()
1054 c2pread, c2pwrite = os.pipe()
1055 errout, errin = os.pipe()
# Child: close every inherited fd except the status pipe, then exec.
1065 for i in range(3, 256):
1066 if i != status_write:
1072 os.execvp(cmd[0], cmd)
# Parent: redirect child's stdout/stderr ends and read all three streams.
1078 os.dup2(c2pread, c2pwrite)
1079 os.dup2(errout, errin)
1081 output = status = ""
1083 i, o, e = select.select([c2pwrite, errin, status_read], [], [])
1086 r = os.read(fd, 8196)
1088 more_data.append(fd)
1089 if fd == c2pwrite or fd == errin:
1091 elif fd == status_read:
1094 fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
1096 pid, exit_status = os.waitpid(pid, 0)
1098 os.close(status_write)
1099 os.close(status_read)
1109 return output, status, exit_status
1111 ################################################################################
# Parse gpgv --status-fd output into a {keyword: args} dict; duplicate
# status tokens (other than a tolerated few) are flagged as internal errors.
1113 def process_gpgv_output(status):
1114 # Process the status-fd output
1117 for line in status.split('\n'):
1121 split = line.split()
1123 internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
1125 (gnupg, keyword) = split[:2]
1126 if gnupg != "[GNUPG:]":
1127 internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
1130 if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
1131 internal_error += "found duplicate status token ('%s').\n" % (keyword)
1134 keywords[keyword] = args
1136 return (keywords, internal_error)
1138 ################################################################################
# Fetch the key that signed 'filename' from a keyserver into 'keyring';
# returns an error message string on failure (None/falsy on success).
1140 def retrieve_key (filename, keyserver=None, keyring=None):
1142 Retrieve the key that signed 'filename' from 'keyserver' and
1143 add it to 'keyring'. Returns nothing on success, or an error message
1147 # Defaults for keyserver and keyring
1149 keyserver = Cnf["Dinstall::KeyServer"]
1151 keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
1153 # Ensure the filename contains no shell meta-characters or other badness
1154 if not re_taint_free.match(filename):
1155 return "%s: tainted filename" % (filename)
1157 # Invoke gpgv on the file
1158 status_read, status_write = os.pipe()
# An empty keyring is used deliberately: we only want the NO_PUBKEY status
# telling us which fingerprint to fetch.
1159 cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1160 (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1162 # Process the status-fd output
1163 (keywords, internal_error) = process_gpgv_output(status)
1165 return internal_error
1167 if not keywords.has_key("NO_PUBKEY"):
1168 return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1170 fingerprint = keywords["NO_PUBKEY"][0]
1171 # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
1172 # it'll try to create a lockfile in /dev. A better solution might
1173 # be a tempfile or something.
1174 cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1175 % (Cnf["Dinstall::SigningKeyring"])
1176 cmd += " --keyring %s --keyserver %s --recv-key %s" \
1177 % (keyring, keyserver, fingerprint)
1178 (result, output) = commands.getstatusoutput(cmd)
1180 return "'%s' failed with exit code %s" % (cmd, result)
1184 ################################################################################
# Render the configured (or given) keyrings as gpg --keyring arguments.
1186 def gpg_keyring_args(keyrings=None):
1188 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1190 return " ".join(["--keyring %s" % x for x in keyrings])
1192 ################################################################################
1194 def check_signature (sig_filename, reject, data_filename="", keyrings=None, autofetch=None):
1196 Check the signature of a file and return the fingerprint if the
1197 signature is valid or 'None' if it's not. The first argument is the
1198 filename whose signature should be checked. The second argument is a
1199 reject function and is called when an error is found. The reject()
1200 function must allow for two arguments: the first is the error message,
1201 the second is an optional prefix string. It's possible for reject()
1202 to be called more than once during an invocation of check_signature().
1203 The third argument is optional and is the name of the files the
1204 detached signature applies to. The fourth argument is optional and is
1205 a *list* of keyrings to use. 'autofetch' can either be None, True or
1206 False. If None, the default behaviour specified in the config will be
1210 # Ensure the filename contains no shell meta-characters or other badness
1211 if not re_taint_free.match(sig_filename):
1212 reject("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
1215 if data_filename and not re_taint_free.match(data_filename):
1216 reject("!!WARNING!! tainted data filename: '%s'." % (data_filename))
1220 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1222 # Autofetch the signing key if that's enabled
1223 if autofetch == None:
1224 autofetch = Cnf.get("Dinstall::KeyAutoFetch")
1226 error_msg = retrieve_key(sig_filename)
1231 # Build the command line
1232 status_read, status_write = os.pipe()
1233 cmd = "gpgv --status-fd %s %s %s %s" % (
1234 status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)
1236 # Invoke gpgv on the file
1237 (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)
1239 # Process the status-fd output
1240 (keywords, internal_error) = process_gpgv_output(status)
1242 # If we failed to parse the status-fd output, let's just whine and bail now
1244 reject("internal error while performing signature check on %s." % (sig_filename))
1245 reject(internal_error, "")
1246 reject("Please report the above errors to the Archive maintainers by replying to this mail.", "")
1250 # Now check for obviously bad things in the processed output
1251 if keywords.has_key("KEYREVOKED"):
1252 reject("The key used to sign %s has been revoked." % (sig_filename))
1254 if keywords.has_key("BADSIG"):
1255 reject("bad signature on %s." % (sig_filename))
1257 if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
1258 reject("failed to check signature on %s." % (sig_filename))
1260 if keywords.has_key("NO_PUBKEY"):
1261 args = keywords["NO_PUBKEY"]
1264 reject("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
1266 if keywords.has_key("BADARMOR"):
1267 reject("ASCII armour of signature was corrupt in %s." % (sig_filename))
1269 if keywords.has_key("NODATA"):
1270 reject("no signature found in %s." % (sig_filename))
1272 if keywords.has_key("EXPKEYSIG"):
1273 args = keywords["EXPKEYSIG"]
1276 reject("Signature made by expired key 0x%s" % (key))
1278 if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
1279 args = keywords["KEYEXPIRED"]
1283 if timestamp.count("T") == 0:
1284 expiredate = time.strftime("%Y-%m-%d", time.gmtime(timestamp))
1286 expiredate = timestamp
1287 reject("The key used to sign %s has expired on %s" % (sig_filename, expiredate))
1293 # Next check gpgv exited with a zero return code
1295 reject("gpgv failed while checking %s." % (sig_filename))
1297 reject(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
1299 reject(prefix_multi_line_string(output, " [GPG output:] "), "")
1302 # Sanity check the good stuff we expect
1303 if not keywords.has_key("VALIDSIG"):
1304 reject("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
1307 args = keywords["VALIDSIG"]
1309 reject("internal error while checking signature on %s." % (sig_filename))
1312 fingerprint = args[0]
1313 if not keywords.has_key("GOODSIG"):
1314 reject("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
1316 if not keywords.has_key("SIG_ID"):
1317 reject("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))
1320 # Finally ensure there's not something we don't recognise
1321 known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
1322 SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
1323 NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")
1325 for keyword in keywords.keys():
1326 if not known_keywords.has_key(keyword):
1327 reject("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
1335 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """
    Retrieve email addresses from gpg key uids for a given fingerprint.

    Results are memoized in the module-level key_uid_email_cache, so gpg
    is only ever invoked once per fingerprint.  Returns a set of the
    addresses matched by re_gpg_uid (empty if gpg fails or finds none).
    """
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses is not None:
        # Cache hit -- avoid the expensive gpg subprocess.
        return addresses
    addresses = set()
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
          % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    if result == 0:
        # One uid per output line; re_gpg_uid pulls out the email part.
        for line in output.split('\n'):
            m = re_gpg_uid.match(line)
            if m:
                addresses.add(m.group(1))
    # Cache the (possibly empty) result so failures aren't retried either.
    key_uid_email_cache[fingerprint] = addresses
    return addresses
1354 ################################################################################
1356 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """
    Greedy word-wrapper (inspired by http://www.zopelabs.com/cookbook/1022242603).

    Splits 'paragraph' on whitespace and re-flows the words into lines of
    at most 'max_length' characters.  A single word longer than
    'max_length' is kept whole on a line of its own (words are never
    split).  Every line after the first is indented with 'prefix'.

    Returns the wrapped text as a single string (no trailing newline).
    """
    lines = []
    current = ""
    for word in paragraph.split():
        if len(word) > max_length:
            # Oversized word: flush any pending line, then emit it alone.
            if current:
                lines.append(current)
                current = ""
            lines.append(word)
        elif not current:
            current = word
        elif len(current) + 1 + len(word) > max_length:
            # Word doesn't fit (+1 for the joining space): start a new line.
            lines.append(current)
            current = word
        else:
            current += " " + word
    if current:
        lines.append(current)
    # First line carries no prefix; continuation lines do.
    return ("\n" + prefix).join(lines)
1387 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
    Returns the fixed 'src' path.
    """
    # Strip the leading root from both endpoints (first occurrence only).
    rel_src = src.replace(root, '', 1)
    rel_dest_dir = os.path.dirname(dest.replace(root, '', 1))
    # One '../' hop for every directory component the link lives under.
    hops = len(rel_dest_dir.split('/'))
    return ('../' * hops) + rel_src
1400 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.

    If 'directory' is non-null, the file is pre-created inside it.
    'prefix' and 'suffix' are glued onto the generated name
    (default prefix: "dak").

    Returns a pair (fd, name), exactly as tempfile.mkstemp does.
    """
    # tempfile does the heavy lifting: O_EXCL creation with a safe mode.
    fd, name = tempfile.mkstemp(suffix, prefix, directory)
    return (fd, name)
1414 ################################################################################
def is_email_alias(email):
    """
    Checks if the user part of the email is listed in the alias file.

    The alias file (located via which_alias_file()) is read once and its
    local parts cached in the module-level alias_cache set; subsequent
    calls are a plain set membership test.
    """
    global alias_cache
    if alias_cache is None:
        aliasfn = which_alias_file()
        alias_cache = set()
        if aliasfn:
            # Alias file is colon-separated; first field is the local part.
            f = open(aliasfn)
            try:
                for line in f:
                    alias_cache.add(line.split(':')[0])
            finally:
                # Close explicitly rather than leaking the file handle.
                f.close()
    uid = email.split('@')[0]
    return uid in alias_cache
1428 ################################################################################
# Module import side effect: build the global dak configuration object.
# Start from the compile-time default config (default_config, normally
# /etc/dak/dak.conf).
Cnf = apt_pkg.newConfiguration()
apt_pkg.ReadConfigFileISC(Cnf,default_config)

# If a host-specific config file is in use (which_conf_file() differs from
# the default), layer it on top so its settings override the defaults.
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1438 ################################################################################