2 # vim:set et ts=4 sw=4:
5 # Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
7 ################################################################################
9 # This program is free software; you can redistribute it and/or modify
10 # it under the terms of the GNU General Public License as published by
11 # the Free Software Foundation; either version 2 of the License, or
12 # (at your option) any later version.
14 # This program is distributed in the hope that it will be useful,
15 # but WITHOUT ANY WARRANTY; without even the implied warranty of
16 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 # GNU General Public License for more details.
19 # You should have received a copy of the GNU General Public License
20 # along with this program; if not, write to the Free Software
21 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
23 ################################################################################
25 import codecs, commands, email.Header, os, pwd, re, select, socket, shutil, \
26 sys, tempfile, traceback, stat
30 from dak_exceptions import *
32 ################################################################################
34 re_comments = re.compile(r"\#.*")
35 re_no_epoch = re.compile(r"^\d+\:")
36 re_no_revision = re.compile(r"-[^-]+$")
37 re_arch_from_filename = re.compile(r"/binary-[^/]+/")
38 re_extract_src_version = re.compile (r"(\S+)\s*\((.*)\)")
39 re_isadeb = re.compile (r"(.+?)_(.+?)_(.+)\.u?deb$")
40 re_issource = re.compile (r"(.+)_(.+?)\.(orig\.tar\.gz|diff\.gz|tar\.gz|dsc)$")
42 re_single_line_field = re.compile(r"^(\S*)\s*:\s*(.*)")
43 re_multi_line_field = re.compile(r"^\s(.*)")
44 re_taint_free = re.compile(r"^[-+~/\.\w]+$")
46 re_parse_maintainer = re.compile(r"^\s*(\S.*\S)\s*\<([^\>]+)\>")
47 re_gpg_uid = re.compile('^uid.*<([^>]*)>')
49 re_srchasver = re.compile(r"^(\S+)\s+\((\S+)\)$")
50 re_verwithext = re.compile(r"^(\d+)(?:\.(\d+))(?:\s+\((\S+)\))?$")
52 re_srchasver = re.compile(r"^(\S+)\s+\((\S+)\)$")
# Map each HTML-significant character to its character-entity reference.
# (The values had been garbled into identity mappings -- '"' -> '"' etc. --
# which made the substitution in html_escape() a no-op.)
html_escaping = {'"': '&quot;', '&': '&amp;', '<': '&lt;', '>': '&gt;'}
# Single alternation pattern matching any one escapable character; used by
# html_escape() together with the map above.
re_html_escaping = re.compile('|'.join(map(re.escape, html_escaping.keys())))
57 default_config = "/etc/dak/dak.conf"
58 default_apt_config = "/etc/dak/apt.conf"
61 key_uid_email_cache = {}
63 # (hashname, function, earliest_changes_version)
64 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
65 ("sha256", apt_pkg.sha256sum, (1, 8))]
67 ################################################################################
70 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
72 ################################################################################
74 def open_file(filename, mode='r'):
76 f = open(filename, mode)
78 raise CantOpenError, filename
81 ################################################################################
83 def our_raw_input(prompt=""):
85 sys.stdout.write(prompt)
91 sys.stderr.write("\nUser interrupt (^D).\n")
94 ################################################################################
96 def extract_component_from_section(section):
99 if section.find('/') != -1:
100 component = section.split('/')[0]
102 # Expand default component
104 if Cnf.has_key("Component::%s" % section):
109 return (section, component)
111 ################################################################################
113 def parse_deb822(contents, signing_rules=0):
117 # Split the lines in the input, keeping the linebreaks.
118 lines = contents.splitlines(True)
121 raise ParseChangesError, "[Empty changes file]"
123 # Reindex by line number so we can easily verify the format of
129 indexed_lines[index] = line[:-1]
133 num_of_lines = len(indexed_lines.keys())
136 while index < num_of_lines:
138 line = indexed_lines[index]
140 if signing_rules == 1:
142 if index > num_of_lines:
143 raise InvalidDscError, index
144 line = indexed_lines[index]
145 if not line.startswith("-----BEGIN PGP SIGNATURE"):
146 raise InvalidDscError, index
151 if line.startswith("-----BEGIN PGP SIGNATURE"):
153 if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
155 if signing_rules == 1:
156 while index < num_of_lines and line != "":
158 line = indexed_lines[index]
160 # If we're not inside the signed data, don't process anything
161 if signing_rules >= 0 and not inside_signature:
163 slf = re_single_line_field.match(line)
165 field = slf.groups()[0].lower()
166 changes[field] = slf.groups()[1]
170 changes[field] += '\n'
172 mlf = re_multi_line_field.match(line)
175 raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
176 if first == 1 and changes[field] != "":
177 changes[field] += '\n'
179 changes[field] += mlf.groups()[0] + '\n'
183 if signing_rules == 1 and inside_signature:
184 raise InvalidDscError, index
186 changes["filecontents"] = "".join(lines)
188 if changes.has_key("source"):
189 # Strip the source version in brackets from the source field,
190 # put it in the "source-version" field instead.
191 srcver = re_srchasver.search(changes["source"])
193 changes["source"] = srcver.group(1)
194 changes["source-version"] = srcver.group(2)
197 raise ParseChangesError, error
201 ################################################################################
203 def parse_changes(filename, signing_rules=0):
204 """Parses a changes file and returns a dictionary where each field is a
205 key. The mandatory first argument is the filename of the .changes
208 signing_rules is an optional argument:
210 o If signing_rules == -1, no signature is required.
211 o If signing_rules == 0 (the default), a signature is required.
212 o If signing_rules == 1, it turns on the same strict format checking
215 The rules for (signing_rules == 1)-mode are:
217 o The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
218 followed by any PGP header data and must end with a blank line.
220 o The data section must end with a blank line and must be followed by
221 "-----BEGIN PGP SIGNATURE-----".
224 changes_in = open_file(filename)
225 content = changes_in.read()
227 return parse_deb822(content, signing_rules)
229 ################################################################################
def hash_key(hashname):
    """Return the files-dict key under which a hash value is stored.

    For example, "md5" yields "md5sum"."""
    return "{0}sum".format(hashname)
234 ################################################################################
236 def create_hash(where, files, hashname, hashfunc):
237 """create_hash extends the passed files dict with the given hash by
238 iterating over all files on disk and passing them to the hashing
242 for f in files.keys():
244 file_handle = open_file(f)
245 except CantOpenError:
246 rejmsg.append("Could not open file %s for checksumming" % (f))
248 files[f][hash_key(hashname)] = hashfunc(file_handle)
253 ################################################################################
255 def check_hash(where, files, hashname, hashfunc):
256 """check_hash checks the given hash in the files dict against the actual
257 files on disk. The hash values need to be present consistently in
258 all file entries. It does not modify its input in any way."""
261 for f in files.keys():
265 file_handle = open_file(f)
267 # Check for the hash entry, to not trigger a KeyError.
268 if not files[f].has_key(hash_key(hashname)):
269 rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
273 # Actually check the hash for correctness.
274 if hashfunc(file_handle) != files[f][hash_key(hashname)]:
275 rejmsg.append("%s: %s check failed in %s" % (f, hashname,
277 except CantOpenError:
278 # TODO: This happens when the file is in the pool.
279 # warn("Cannot open file %s" % f)
286 ################################################################################
288 def check_size(where, files):
289 """check_size checks the file sizes in the passed files dict against the
293 for f in files.keys():
298 # TODO: This happens when the file is in the pool.
302 actual_size = entry[stat.ST_SIZE]
303 size = int(files[f]["size"])
304 if size != actual_size:
305 rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
306 % (f, actual_size, size, where))
309 ################################################################################
311 def check_hash_fields(what, manifest):
312 """check_hash_fields ensures that there are no checksum fields in the
313 given dict that we do not know about."""
316 hashes = map(lambda x: x[0], known_hashes)
317 for field in manifest:
318 if field.startswith("checksums-"):
319 hashname = field.split("-",1)[1]
320 if hashname not in hashes:
321 rejmsg.append("Unsupported checksum field for %s "\
322 "in %s" % (hashname, what))
325 ################################################################################
327 def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
328 if format >= version:
329 # The version should contain the specified hash.
332 # Import hashes from the changes
333 rejmsg = parse_checksums(".changes", files, changes, hashname)
337 # We need to calculate the hash because it can't possibly
340 return func(".changes", files, hashname, hashfunc)
342 # We could add the orig which might be in the pool to the files dict to
343 # access the checksums easily.
345 def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
346 """ensure_dsc_hashes' task is to ensure that each and every *present* hash
347 in the dsc is correct, i.e. identical to the changes file and if necessary
348 the pool. The latter task is delegated to check_hash."""
351 if not dsc.has_key('Checksums-%s' % (hashname,)):
353 # Import hashes from the dsc
354 parse_checksums(".dsc", dsc_files, dsc, hashname)
356 rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
359 ################################################################################
361 def ensure_hashes(changes, dsc, files, dsc_files):
364 # Make sure we recognise the format of the Files: field in the .changes
365 format = changes.get("format", "0.0").split(".", 1)
367 format = int(format[0]), int(format[1])
369 format = int(float(format[0])), 0
371 # We need to deal with the original changes blob, as the fields we need
372 # might not be in the changes dict serialised into the .dak anymore.
373 orig_changes = parse_deb822(changes['filecontents'])
375 # Copy the checksums over to the current changes dict. This will keep
376 # the existing modifications to it intact.
377 for field in orig_changes:
378 if field.startswith('checksums-'):
379 changes[field] = orig_changes[field]
381 # Check for unsupported hashes
382 rejmsg.extend(check_hash_fields(".changes", changes))
383 rejmsg.extend(check_hash_fields(".dsc", dsc))
385 # We have to calculate the hash if we have an earlier changes version than
386 # the hash appears in rather than require it exist in the changes file
387 for hashname, hashfunc, version in known_hashes:
388 rejmsg.extend(_ensure_changes_hash(changes, format, version, files,
390 if "source" in changes["architecture"]:
391 rejmsg.extend(_ensure_dsc_hash(dsc, dsc_files, hashname,
396 def parse_checksums(where, files, manifest, hashname):
398 field = 'checksums-%s' % hashname
399 if not field in manifest:
401 input = manifest[field]
402 for line in input.split('\n'):
405 hash, size, file = line.strip().split(' ')
406 if not files.has_key(file):
407 # TODO: check for the file's entry in the original files dict, not
408 # the one modified by (auto)byhand and other weird stuff
409 # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
410 # (file, hashname, where))
412 if not files[file]["size"] == size:
413 rejmsg.append("%s: size differs for files and checksums-%s entry "\
414 "in %s" % (file, hashname, where))
416 files[file][hash_key(hashname)] = hash
417 for f in files.keys():
418 if not files[f].has_key(hash_key(hashname)):
419 rejmsg.append("%s: no entry in checksums-%s in %s" % (file,
423 ################################################################################
425 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
427 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
430 # Make sure we have a Files: field to parse...
431 if not changes.has_key(field):
432 raise NoFilesFieldError
434 # Make sure we recognise the format of the Files: field
435 format = re_verwithext.search(changes.get("format", "0.0"))
437 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
439 format = format.groups()
440 if format[1] == None:
441 format = int(float(format[0])), 0, format[2]
443 format = int(format[0]), int(format[1]), format[2]
444 if format[2] == None:
448 # format = (1,0) are the only formats we currently accept,
449 # format = (0,0) are missing format headers of which we still
450 # have some in the archive.
451 if format != (1,0) and format != (0,0):
452 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
454 if (format < (1,5) or format > (1,8)):
455 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
456 if field != "files" and format < (1,8):
457 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
459 includes_section = (not is_a_dsc) and field == "files"
461 # Parse each entry/line:
462 for i in changes[field].split('\n'):
466 section = priority = ""
469 (md5, size, section, priority, name) = s
471 (md5, size, name) = s
473 raise ParseChangesError, i
480 (section, component) = extract_component_from_section(section)
482 files[name] = Dict(size=size, section=section,
483 priority=priority, component=component)
484 files[name][hashname] = md5
488 ################################################################################
490 def force_to_utf8(s):
491 """Forces a string to UTF-8. If the string isn't already UTF-8,
492 it's assumed to be ISO-8859-1."""
497 latin1_s = unicode(s,'iso8859-1')
498 return latin1_s.encode('utf-8')
500 def rfc2047_encode(s):
501 """Encodes a (header) string per RFC2047 if necessary. If the
502 string is neither ASCII nor UTF-8, it's assumed to be ISO-8859-1."""
504 codecs.lookup('ascii')[1](s)
509 codecs.lookup('utf-8')[1](s)
510 h = email.Header.Header(s, 'utf-8', 998)
513 h = email.Header.Header(s, 'iso-8859-1', 998)
516 ################################################################################
518 # <Culus> 'The standard sucks, but my tool is supposed to interoperate
519 # with it. I know - I'll fix the suckage and make things
522 def fix_maintainer (maintainer):
523 """Parses a Maintainer or Changed-By field and returns:
524 (1) an RFC822 compatible version,
525 (2) an RFC2047 compatible version,
529 The name is forced to UTF-8 for both (1) and (3). If the name field
530 contains '.' or ',' (as allowed by Debian policy), (1) and (2) are
531 switched to 'email (name)' format."""
532 maintainer = maintainer.strip()
534 return ('', '', '', '')
536 if maintainer.find("<") == -1:
539 elif (maintainer[0] == "<" and maintainer[-1:] == ">"):
540 email = maintainer[1:-1]
543 m = re_parse_maintainer.match(maintainer)
545 raise ParseMaintError, "Doesn't parse as a valid Maintainer field."
549 # Get an RFC2047 compliant version of the name
550 rfc2047_name = rfc2047_encode(name)
552 # Force the name to be UTF-8
553 name = force_to_utf8(name)
555 if name.find(',') != -1 or name.find('.') != -1:
556 rfc822_maint = "%s (%s)" % (email, name)
557 rfc2047_maint = "%s (%s)" % (email, rfc2047_name)
559 rfc822_maint = "%s <%s>" % (name, email)
560 rfc2047_maint = "%s <%s>" % (rfc2047_name, email)
562 if email.find("@") == -1 and email.find("buildd_") != 0:
563 raise ParseMaintError, "No @ found in email address part."
565 return (rfc822_maint, rfc2047_maint, name, email)
567 ################################################################################
569 # sendmail wrapper, takes _either_ a message string or a file as arguments
570 def send_mail (message, filename=""):
571 # If we've been passed a string dump it into a temporary file
573 filename = tempfile.mktemp()
574 fd = os.open(filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0700)
575 os.write (fd, message)
579 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
581 raise SendmailFailedError, output
583 # Clean up any temporary files
587 ################################################################################
589 def poolify (source, component):
592 if source[:3] == "lib":
593 return component + source[:4] + '/' + source + '/'
595 return component + source[:1] + '/' + source + '/'
597 ################################################################################
599 def move (src, dest, overwrite = 0, perms = 0664):
600 if os.path.exists(dest) and os.path.isdir(dest):
603 dest_dir = os.path.dirname(dest)
604 if not os.path.exists(dest_dir):
605 umask = os.umask(00000)
606 os.makedirs(dest_dir, 02775)
608 #print "Moving %s to %s..." % (src, dest)
609 if os.path.exists(dest) and os.path.isdir(dest):
610 dest += '/' + os.path.basename(src)
611 # Don't overwrite unless forced to
612 if os.path.exists(dest):
614 fubar("Can't move %s to %s - file already exists." % (src, dest))
616 if not os.access(dest, os.W_OK):
617 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
618 shutil.copy2(src, dest)
619 os.chmod(dest, perms)
622 def copy (src, dest, overwrite = 0, perms = 0664):
623 if os.path.exists(dest) and os.path.isdir(dest):
626 dest_dir = os.path.dirname(dest)
627 if not os.path.exists(dest_dir):
628 umask = os.umask(00000)
629 os.makedirs(dest_dir, 02775)
631 #print "Copying %s to %s..." % (src, dest)
632 if os.path.exists(dest) and os.path.isdir(dest):
633 dest += '/' + os.path.basename(src)
634 # Don't overwrite unless forced to
635 if os.path.exists(dest):
637 raise FileExistsError
639 if not os.access(dest, os.W_OK):
640 raise CantOverwriteError
641 shutil.copy2(src, dest)
642 os.chmod(dest, perms)
644 ################################################################################
647 res = socket.gethostbyaddr(socket.gethostname())
648 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
649 if database_hostname:
650 return database_hostname
654 def which_conf_file ():
655 res = socket.gethostbyaddr(socket.gethostname())
656 if Cnf.get("Config::" + res[0] + "::DakConfig"):
657 return Cnf["Config::" + res[0] + "::DakConfig"]
659 return default_config
661 def which_apt_conf_file ():
662 res = socket.gethostbyaddr(socket.gethostname())
663 if Cnf.get("Config::" + res[0] + "::AptConfig"):
664 return Cnf["Config::" + res[0] + "::AptConfig"]
666 return default_apt_config
668 def which_alias_file():
669 hostname = socket.gethostbyaddr(socket.gethostname())[0]
670 aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
671 if os.path.exists(aliasfn):
676 ################################################################################
678 # Escape characters which have meaning to SQL's regex comparison operator ('~')
679 # (woefully incomplete)
682 s = s.replace('+', '\\\\+')
683 s = s.replace('.', '\\\\.')
686 ################################################################################
688 # Perform a substitution of template
689 def TemplateSubst(map, filename):
690 file = open_file(filename)
691 template = file.read()
693 template = template.replace(x,map[x])
697 ################################################################################
699 def fubar(msg, exit_code=1):
700 sys.stderr.write("E: %s\n" % (msg))
704 sys.stderr.write("W: %s\n" % (msg))
706 ################################################################################
708 # Returns the user name with a laughable attempt at rfc822 conformance
709 # (read: removing stray periods).
711 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
713 ################################################################################
723 return ("%d%s" % (c, t))
725 ################################################################################
727 def cc_fix_changes (changes):
728 o = changes.get("architecture", "")
730 del changes["architecture"]
731 changes["architecture"] = {}
733 changes["architecture"][j] = 1
735 # Sort by source name, source version, 'have source', and then by filename
736 def changes_compare (a, b):
738 a_changes = parse_changes(a)
743 b_changes = parse_changes(b)
747 cc_fix_changes (a_changes)
748 cc_fix_changes (b_changes)
750 # Sort by source name
751 a_source = a_changes.get("source")
752 b_source = b_changes.get("source")
753 q = cmp (a_source, b_source)
757 # Sort by source version
758 a_version = a_changes.get("version", "0")
759 b_version = b_changes.get("version", "0")
760 q = apt_pkg.VersionCompare(a_version, b_version)
764 # Sort by 'have source'
765 a_has_source = a_changes["architecture"].get("source")
766 b_has_source = b_changes["architecture"].get("source")
767 if a_has_source and not b_has_source:
769 elif b_has_source and not a_has_source:
772 # Fall back to sort by filename
775 ################################################################################
777 def find_next_free (dest, too_many=100):
780 while os.path.exists(dest) and extra < too_many:
781 dest = orig_dest + '.' + repr(extra)
783 if extra >= too_many:
784 raise NoFreeFilenameError
787 ################################################################################
789 def result_join (original, sep = '\t'):
791 for i in xrange(len(original)):
792 if original[i] == None:
795 list.append(original[i])
796 return sep.join(list)
798 ################################################################################
800 def prefix_multi_line_string(str, prefix, include_blank_lines=0):
802 for line in str.split('\n'):
804 if line or include_blank_lines:
805 out += "%s%s\n" % (prefix, line)
806 # Strip trailing new line
811 ################################################################################
813 def validate_changes_file_arg(filename, require_changes=1):
814 """'filename' is either a .changes or .dak file. If 'filename' is a
815 .dak file, it's changed to be the corresponding .changes file. The
816 function then checks if the .changes file a) exists and b) is
817 readable and returns the .changes filename if so. If there's a
818 problem, the next action depends on the option 'require_changes'
821 o If 'require_changes' == -1, errors are ignored and the .changes
822 filename is returned.
823 o If 'require_changes' == 0, a warning is given and 'None' is returned.
824 o If 'require_changes' == 1, a fatal error is raised.
828 orig_filename = filename
829 if filename.endswith(".dak"):
830 filename = filename[:-4]+".changes"
832 if not filename.endswith(".changes"):
833 error = "invalid file type; not a changes file"
835 if not os.access(filename,os.R_OK):
836 if os.path.exists(filename):
837 error = "permission denied"
839 error = "file not found"
842 if require_changes == 1:
843 fubar("%s: %s." % (orig_filename, error))
844 elif require_changes == 0:
845 warn("Skipping %s - %s" % (orig_filename, error))
847 else: # We only care about the .dak file
852 ################################################################################
855 return (arch != "source" and arch != "all")
857 ################################################################################
def join_with_commas_and(list):
    """Join a list of strings English-style: "nothing" for an empty list,
    the sole element for a singleton, otherwise "a, b and c"."""
    count = len(list)
    if count == 0:
        return "nothing"
    if count == 1:
        return list[0]
    head = ", ".join(list[:-1])
    return head + " and " + list[-1]
864 ################################################################################
869 (pkg, version, constraint) = atom
871 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
874 pp_deps.append(pp_dep)
875 return " |".join(pp_deps)
877 ################################################################################
882 ################################################################################
884 # Handle -a, -c and -s arguments; returns them as SQL constraints
885 def parse_args(Options):
889 for suite in split_args(Options["Suite"]):
890 suite_id = database.get_suite_id(suite)
892 warn("suite '%s' not recognised." % (suite))
894 suite_ids_list.append(suite_id)
896 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
898 fubar("No valid suite given.")
903 if Options["Component"]:
904 component_ids_list = []
905 for component in split_args(Options["Component"]):
906 component_id = database.get_component_id(component)
907 if component_id == -1:
908 warn("component '%s' not recognised." % (component))
910 component_ids_list.append(component_id)
911 if component_ids_list:
912 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
914 fubar("No valid component given.")
918 # Process architecture
919 con_architectures = ""
920 if Options["Architecture"]:
923 for architecture in split_args(Options["Architecture"]):
924 if architecture == "source":
927 architecture_id = database.get_architecture_id(architecture)
928 if architecture_id == -1:
929 warn("architecture '%s' not recognised." % (architecture))
931 arch_ids_list.append(architecture_id)
933 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
936 fubar("No valid architecture given.")
940 return (con_suites, con_architectures, con_components, check_source)
942 ################################################################################
944 # Inspired(tm) by Bryn Keller's print_exc_plus (See
945 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
948 tb = sys.exc_info()[2]
957 traceback.print_exc()
959 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
960 frame.f_code.co_filename,
962 for key, value in frame.f_locals.items():
963 print "\t%20s = " % key,
967 print "<unable to print>"
969 ################################################################################
971 def try_with_debug(function):
979 ################################################################################
981 # Function for use in sorting lists of architectures.
982 # Sorts normally except that 'source' dominates all others.
984 def arch_compare_sw (a, b):
985 if a == "source" and b == "source":
994 ################################################################################
996 # Split command line arguments which can be separated by either commas
997 # or whitespace. If dwim is set, it will complain about string ending
998 # in comma since this usually means someone did 'dak ls -a i386, m68k
999 # foo' or something and the inevitable confusion resulting from 'm68k'
1000 # being treated as an argument is undesirable.
1002 def split_args (s, dwim=1):
1003 if s.find(",") == -1:
1006 if s[-1:] == "," and dwim:
1007 fubar("split_args: found trailing comma, spurious space maybe?")
1010 ################################################################################
1012 def Dict(**dict): return dict
1014 ########################################
1016 # Our very own version of commands.getstatusoutput(), hacked to support
1018 def gpgv_get_status_output(cmd, status_read, status_write):
1019 cmd = ['/bin/sh', '-c', cmd]
1020 p2cread, p2cwrite = os.pipe()
1021 c2pread, c2pwrite = os.pipe()
1022 errout, errin = os.pipe()
1032 for i in range(3, 256):
1033 if i != status_write:
1039 os.execvp(cmd[0], cmd)
1045 os.dup2(c2pread, c2pwrite)
1046 os.dup2(errout, errin)
1048 output = status = ""
1050 i, o, e = select.select([c2pwrite, errin, status_read], [], [])
1053 r = os.read(fd, 8196)
1055 more_data.append(fd)
1056 if fd == c2pwrite or fd == errin:
1058 elif fd == status_read:
1061 fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
1063 pid, exit_status = os.waitpid(pid, 0)
1065 os.close(status_write)
1066 os.close(status_read)
1076 return output, status, exit_status
1078 ################################################################################
1080 def process_gpgv_output(status):
1081 # Process the status-fd output
1084 for line in status.split('\n'):
1088 split = line.split()
1090 internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
1092 (gnupg, keyword) = split[:2]
1093 if gnupg != "[GNUPG:]":
1094 internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
1097 if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
1098 internal_error += "found duplicate status token ('%s').\n" % (keyword)
1101 keywords[keyword] = args
1103 return (keywords, internal_error)
1105 ################################################################################
1107 def retrieve_key (filename, keyserver=None, keyring=None):
1108 """Retrieve the key that signed 'filename' from 'keyserver' and
1109 add it to 'keyring'. Returns nothing on success, or an error message
1112 # Defaults for keyserver and keyring
1114 keyserver = Cnf["Dinstall::KeyServer"]
1116 keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
1118 # Ensure the filename contains no shell meta-characters or other badness
1119 if not re_taint_free.match(filename):
1120 return "%s: tainted filename" % (filename)
1122 # Invoke gpgv on the file
1123 status_read, status_write = os.pipe();
1124 cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1125 (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1127 # Process the status-fd output
1128 (keywords, internal_error) = process_gpgv_output(status)
1130 return internal_error
1132 if not keywords.has_key("NO_PUBKEY"):
1133 return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1135 fingerprint = keywords["NO_PUBKEY"][0]
1136 # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
1137 # it'll try to create a lockfile in /dev. A better solution might
1138 # be a tempfile or something.
1139 cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1140 % (Cnf["Dinstall::SigningKeyring"])
1141 cmd += " --keyring %s --keyserver %s --recv-key %s" \
1142 % (keyring, keyserver, fingerprint)
1143 (result, output) = commands.getstatusoutput(cmd)
1145 return "'%s' failed with exit code %s" % (cmd, result)
1149 ################################################################################
1151 def gpg_keyring_args(keyrings=None):
1153 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1155 return " ".join(["--keyring %s" % x for x in keyrings])
1157 ################################################################################
def check_signature (sig_filename, reject, data_filename="", keyrings=None, autofetch=None):
    """Check the signature of a file and return the fingerprint if the
    signature is valid or 'None' if it's not. The first argument is the
    filename whose signature should be checked. The second argument is a
    reject function and is called when an error is found. The reject()
    function must allow for two arguments: the first is the error message,
    the second is an optional prefix string. It's possible for reject()
    to be called more than once during an invocation of check_signature().
    The third argument is optional and is the name of the files the
    detached signature applies to. The fourth argument is optional and is
    a *list* of keyrings to use. 'autofetch' can either be None, True or
    False. If None, the default behaviour specified in the config will be
    used.
    """
    # NOTE(review): gaps in this chunk's original numbering indicate that a
    # number of statements (early `return None`s and guard conditionals)
    # were lost in extraction.  The hedged notes below mark the suspicious
    # spots -- confirm control flow against upstream dak before relying on
    # this block.

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(sig_filename):
        reject("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))

    if data_filename and not re_taint_free.match(data_filename):
        reject("!!WARNING!! tainted data filename: '%s'." % (data_filename))

    # NOTE(review): presumably only a fallback when no keyrings were passed
    # in -- an `if not keyrings:` guard appears to be missing here.
    keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    # Autofetch the signing key if that's enabled
    if autofetch == None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    # NOTE(review): likely guarded by `if autofetch:` upstream.
    error_msg = retrieve_key(sig_filename)

    # Build the command line
    status_read, status_write = os.pipe();
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    # NOTE(review): these three rejects were presumably under an
    # `if internal_error:` guard that is missing from this chunk.
    reject("internal error while performing signature check on %s." % (sig_filename))
    reject(internal_error, "")
    reject("Please report the above errors to the Archive maintainers by replying to this mail.", "")

    # Now check for obviously bad things in the processed output
    if keywords.has_key("KEYREVOKED"):
        reject("The key used to sign %s has been revoked." % (sig_filename))
    if keywords.has_key("BADSIG"):
        reject("bad signature on %s." % (sig_filename))
    if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
        reject("failed to check signature on %s." % (sig_filename))
    if keywords.has_key("NO_PUBKEY"):
        args = keywords["NO_PUBKEY"]
        # NOTE(review): `key` is never bound in the visible code -- an
        # extraction like `key = args[0]` appears to be missing.
        reject("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if keywords.has_key("BADARMOR"):
        reject("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if keywords.has_key("NODATA"):
        reject("no signature found in %s." % (sig_filename))
    if keywords.has_key("EXPKEYSIG"):
        args = keywords["EXPKEYSIG"]
        # NOTE(review): as above, the `key` extraction from args is missing.
        reject("Signature made by expired key 0x%s" % (key))
    if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
        args = keywords["KEYEXPIRED"]
        # NOTE(review): `timestamp` is never bound in the visible code, and
        # the unconditional `expiredate = timestamp` below probably lived in
        # an `else:` branch upstream -- as written it always overwrites the
        # formatted date.
        if timestamp.count("T") == 0:
            expiredate = time.strftime("%Y-%m-%d", time.gmtime(timestamp))
        expiredate = timestamp
        reject("The key used to sign %s has expired on %s" % (sig_filename, expiredate))

    # Next check gpgv exited with a zero return code
    # NOTE(review): presumably guarded by `if exit_status:` upstream; as
    # written these rejects fire unconditionally.
    reject("gpgv failed while checking %s." % (sig_filename))
    reject(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
    reject(prefix_multi_line_string(output, " [GPG output:] "), "")

    # Sanity check the good stuff we expect
    if not keywords.has_key("VALIDSIG"):
        reject("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
    # NOTE(review): branch structure around the next three statements
    # appears to be lost (the internal-error reject and the fingerprint
    # extraction cannot both be meant to run unconditionally).
    args = keywords["VALIDSIG"]
    reject("internal error while checking signature on %s." % (sig_filename))
    fingerprint = args[0]
    if not keywords.has_key("GOODSIG"):
        reject("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if not keywords.has_key("SIG_ID"):
        reject("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

    # Finally ensure there's not something we don't recognise
    known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
                          NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")
    for keyword in keywords.keys():
        if not known_keywords.has_key(keyword):
            reject("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
1298 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """Retrieve email addresses from gpg key uids for a given fingerprint."""
    # Consult the per-process cache first.
    addresses = key_uid_email_cache.get(fingerprint)
    # NOTE(review): gaps in this chunk's numbering suggest missing lines --
    # upstream presumably *returns* the cached value on a hit and
    # re-initialises `addresses = set()` before invoking gpg, so the guard
    # below looks inverted as written.  An `if m:` before the .add() and a
    # final `return addresses` also appear to be missing.  Confirm against
    # upstream dak.
    if addresses != None:
        # Ask gpg for the key's details and scrape the uid lines for
        # <email> parts via re_gpg_uid.
        cmd = "gpg --no-default-keyring %s --fingerprint %s" \
              % (gpg_keyring_args(), fingerprint)
        (result, output) = commands.getstatusoutput(cmd)
        for l in output.split('\n'):
            m = re_gpg_uid.match(l)
            addresses.add(m.group(1))
        # Remember the answer for subsequent lookups of this fingerprint.
        key_uid_email_cache[fingerprint] = addresses
1317 ################################################################################
# Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    # Word-wrap 'paragraph' to at most 'max_length' columns, prepending
    # 'prefix' to continuation lines.
    # NOTE(review): this chunk appears to have lost several lines -- the
    # `s`/`line` accumulator initialisation, the `for word in words:` loop
    # header, the line-building else branch and the final return are absent
    # (`word`, `line` and `s` are unbound as written).  Confirm against
    # upstream dak before relying on this function.
    words = paragraph.split()
    word_size = len(word)
    # A word longer than the limit is flushed onto its own line.
    if word_size > max_length:
        s += line + '\n' + prefix
        s += word + '\n' + prefix
    new_length = len(line) + word_size + 1
    # Flush the current line when appending the word would overflow it.
    if new_length > max_length:
        s += line + '\n' + prefix
1350 ################################################################################
def clean_symlink (src, dest, root):
    """Relativize an absolute symlink from 'src' -> 'dest' relative to
    'root'.  Returns the fixed 'src'."""
    # Strip the leading root prefix from both endpoints (first match only).
    stripped_src = src.replace(root, '', 1)
    link_dir = os.path.dirname(dest.replace(root, '', 1))
    # Climb one level ("../") for each path component of the link's
    # directory, then descend to the stripped target.
    ups = '../' * len(link_dir.split('/'))
    return ups + stripped_src
1361 ################################################################################
def temp_filename(directory=None, dotprefix=None, perms=0700):
    """Return a secure and unique filename by pre-creating it.
    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'dotprefix' is non-null, the filename will be prefixed with a '.'."""
    # NOTE(review): the `if directory:` / `if dotprefix:` guards, the
    # `os.close(fd)` after the open, and the final `return filename`
    # appear to be missing from this chunk (gaps in the original
    # numbering) -- confirm against upstream dak.
    # Redirect tempfile's module-global directory, restoring it below.
    old_tempdir = tempfile.tempdir
    tempfile.tempdir = directory
    filename = tempfile.mktemp()
    # Hide the file by dot-prefixing its basename.
    filename = "%s/.%s" % (os.path.dirname(filename), os.path.basename(filename))
    # O_EXCL pre-creates the file so no other process can race us for it.
    fd = os.open(filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, perms)
    tempfile.tempdir = old_tempdir
1384 ################################################################################
# checks if the user part of the email is listed in the alias file
def is_email_alias(email):
    # Lazily populate the module-level cache of alias names on first use.
    # NOTE(review): the `alias_cache = set()` initialisation (and likely a
    # `global alias_cache` / guard on `aliasfn`) appear to be missing from
    # this chunk -- confirm against upstream dak.
    if alias_cache == None:
        aliasfn = which_alias_file()
        # Each alias-file line is "name: targets"; cache the name part.
        for l in open(aliasfn):
            alias_cache.add(l.split(':')[0])
    # Compare only the local part (before '@') against the cached names.
    uid = email.split('@')[0]
    return uid in alias_cache
1399 ################################################################################
# Module initialisation: load the archive configuration into the global
# `Cnf` object used throughout this file.  `default_config` and
# `which_conf_file()` are defined elsewhere in this module.
Cnf = apt_pkg.newConfiguration()
apt_pkg.ReadConfigFileISC(Cnf,default_config)
# Layer a site-specific configuration on top of the defaults, if one is
# in effect.
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1409 ################################################################################