2 # vim:set et ts=4 sw=4:
5 # Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
7 ################################################################################
9 # This program is free software; you can redistribute it and/or modify
10 # it under the terms of the GNU General Public License as published by
11 # the Free Software Foundation; either version 2 of the License, or
12 # (at your option) any later version.
14 # This program is distributed in the hope that it will be useful,
15 # but WITHOUT ANY WARRANTY; without even the implied warranty of
16 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 # GNU General Public License for more details.
19 # You should have received a copy of the GNU General Public License
20 # along with this program; if not, write to the Free Software
21 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
23 ################################################################################
25 import codecs, commands, email.Header, os, pwd, re, select, socket, shutil, \
26 sys, tempfile, traceback, stat
29 from dak_exceptions import *
31 ################################################################################
# Precompiled regular expressions shared across this module.
# Strip "#"-to-end-of-line comments.
33 re_comments = re.compile(r"\#.*")
# Leading "epoch:" of a Debian version string.
34 re_no_epoch = re.compile(r"^\d+\:")
# Trailing "-revision" of a Debian version string.
35 re_no_revision = re.compile(r"-[^-]+$")
# Pull the architecture out of a ".../binary-<arch>/..." pool path.
36 re_arch_from_filename = re.compile(r"/binary-[^/]+/")
# "package (version)" as written in dependency/source fields.
37 re_extract_src_version = re.compile (r"(\S+)\s*\((.*)\)")
# Binary package filename: name_version_arch.deb or .udeb.
38 re_isadeb = re.compile (r"(.+?)_(.+?)_(.+)\.u?deb$")
# Source package filename: .orig.tar.gz / .diff.gz / .tar.gz / .dsc.
39 re_issource = re.compile (r"(.+)_(.+?)\.(orig\.tar\.gz|diff\.gz|tar\.gz|dsc)$")
# RFC822-style control-field parsing: "Field: value" ...
41 re_single_line_field = re.compile(r"^(\S*)\s*:\s*(.*)")
# ... and its leading-whitespace continuation lines.
42 re_multi_line_field = re.compile(r"^\s(.*)")
# Filenames considered safe to interpolate into a shell command line.
43 re_taint_free = re.compile(r"^[-+~/\.\w]+$")
# "Real Name <email>" maintainer syntax.
45 re_parse_maintainer = re.compile(r"^\s*(\S.*\S)\s*\<([^\>]+)\>")
# The email address in a gpg "uid" listing line.
46 re_gpg_uid = re.compile('^uid.*<([^>]*)>')
# Match "source (version)" as found in a .changes Source field.
re_srchasver = re.compile(r"^(\S+)\s+\((\S+)\)$")
# Match a Format field value: "major.minor", optionally followed by
# a parenthesised suffix, e.g. "1.8 (unstable)".
re_verwithext = re.compile(r"^(\d+)(?:\.(\d+))(?:\s+\((\S+)\))?$")
# NOTE: an identical duplicate assignment of re_srchasver (which
# immediately followed re_verwithext in the original) was removed.
# Map HTML-special characters to their entity references.  The previous
# version of this line had been reduced to an identity mapping
# ({'"':'"', ...} -- almost certainly entity references lost to an
# HTML-rendering pass), which made substitution via re_html_escaping a
# no-op; restore the actual entities.
html_escaping = {'"': '&quot;', '&': '&amp;', '<': '&lt;', '>': '&gt;'}
# Regex matching any one character that needs HTML escaping.
re_html_escaping = re.compile('|'.join(map(re.escape, html_escaping.keys())))
# Fallback config paths used when the host-specific lookup fails.
56 default_config = "/etc/dak/dak.conf"
57 default_apt_config = "/etc/dak/apt.conf"
# Cache of key fingerprint -> uid/email (populated elsewhere in the file).
60 key_uid_email_cache = {}
# (hashname, function, earliest_changes_version)
# The version tuple is the first .changes Format version in which the hash
# is expected to be present (see _ensure_changes_hash below).
# NOTE(review): apt_pkg is imported at the top of the full file; the import
# line is elided from this excerpt.
63 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
64 ("sha256", apt_pkg.sha256sum, (1, 8))]
66 ################################################################################
# Body of escape_html(s) -- the def line is elided from this excerpt.
# Replaces every HTML-special character in s via the html_escaping map.
69 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
71 ################################################################################
# Open `filename` with the given mode, converting failure into dak's
# CantOpenError.
# NOTE(review): the try/except wrapper and the `return f` are elided from
# this excerpt; only the open() call and the raise are visible.
73 def open_file(filename, mode='r'):
75 f = open(filename, mode)
77 raise CantOpenError, filename
80 ################################################################################
# raw_input() wrapper: prints `prompt` and reads a line; on EOF (^D) it
# writes a notice to stderr.  The actual raw_input call and the EOF
# handling (sys.exit, presumably) are elided from this excerpt -- confirm
# against the full source.
82 def our_raw_input(prompt=""):
84 sys.stdout.write(prompt)
90 sys.stderr.write("\nUser interrupt (^D).\n")
93 ################################################################################
# Split a "component/section" Section value into (section, component).
# Cnf is the module-global apt-style configuration object (defined in an
# elided part of the file); it supplies the default component expansion.
95 def extract_component_from_section(section):
98 if section.find('/') != -1:
99 component = section.split('/')[0]
101 # Expand default component
103 if Cnf.has_key("Component::%s" % section):
# NOTE(review): the branches assigning `component` when there is no "/"
# are elided from this excerpt.
108 return (section, component)
110 ################################################################################
# Parse deb822-style control data (a .changes/.dsc body) into a dict keyed
# by lower-cased field name; the raw text is kept in "filecontents".
# signing_rules: -1 = no signature required, 0 = signature required,
# 1 = strict .dsc-style PGP-framing checks (InvalidDscError on violation).
# NOTE(review): many interior lines are elided from this excerpt (index
# bookkeeping, inside_signature toggling, `first` handling, the final
# return) -- do not infer behavior beyond the visible lines.
112 def parse_deb822(contents, signing_rules=0):
116 # Split the lines in the input, keeping the linebreaks.
117 lines = contents.splitlines(True)
120 raise ParseChangesError, "[Empty changes file]"
122 # Reindex by line number so we can easily verify the format of
128 indexed_lines[index] = line[:-1]
132 num_of_lines = len(indexed_lines.keys())
135 while index < num_of_lines:
137 line = indexed_lines[index]
# Strict mode: a blank line must be followed by the PGP SIGNATURE marker.
139 if signing_rules == 1:
141 if index > num_of_lines:
142 raise InvalidDscError, index
143 line = indexed_lines[index]
144 if not line.startswith("-----BEGIN PGP SIGNATURE"):
145 raise InvalidDscError, index
150 if line.startswith("-----BEGIN PGP SIGNATURE"):
152 if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
# Strict mode: skip the PGP header block up to its terminating blank line.
154 if signing_rules == 1:
155 while index < num_of_lines and line != "":
157 line = indexed_lines[index]
159 # If we're not inside the signed data, don't process anything
160 if signing_rules >= 0 and not inside_signature:
# "Field: value" starts a new field ...
162 slf = re_single_line_field.match(line)
164 field = slf.groups()[0].lower()
165 changes[field] = slf.groups()[1]
169 changes[field] += '\n'
# ... a leading-whitespace line continues the previous field.
171 mlf = re_multi_line_field.match(line)
174 raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
175 if first == 1 and changes[field] != "":
176 changes[field] += '\n'
178 changes[field] += mlf.groups()[0] + '\n'
# Strict mode: reaching EOF while still inside the signature is an error.
182 if signing_rules == 1 and inside_signature:
183 raise InvalidDscError, index
185 changes["filecontents"] = "".join(lines)
187 if changes.has_key("source"):
188 # Strip the source version in brackets from the source field,
189 # put it in the "source-version" field instead.
190 srcver = re_srchasver.search(changes["source"])
192 changes["source"] = srcver.group(1)
193 changes["source-version"] = srcver.group(2)
196 raise ParseChangesError, error
200 ################################################################################
202 def parse_changes(filename, signing_rules=0):
203 """Parses a changes file and returns a dictionary where each field is a
204 key. The mandatory first argument is the filename of the .changes
207 signing_rules is an optional argument:
209 o If signing_rules == -1, no signature is required.
210 o If signing_rules == 0 (the default), a signature is required.
211 o If signing_rules == 1, it turns on the same strict format checking
214 The rules for (signing_rules == 1)-mode are:
216 o The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
217 followed by any PGP header data and must end with a blank line.
219 o The data section must end with a blank line and must be followed by
220 "-----BEGIN PGP SIGNATURE-----".
# Reads the whole file and delegates the parsing to parse_deb822.
223 changes_in = open_file(filename)
224 content = changes_in.read()
# NOTE(review): the changes_in.close() call (elided line 225) sits between
# these two statements in the full source.
226 return parse_deb822(content, signing_rules)
228 ################################################################################
def hash_key(hashname):
    """Return the files-dict key under which the named checksum is stored,
    e.g. "md5" -> "md5sum"."""
    return hashname + "sum"
233 ################################################################################
235 def create_hash(where, files, hashname, hashfunc):
236 """create_hash extends the passed files dict with the given hash by
237 iterating over all files on disk and passing them to the hashing
# NOTE(review): rejmsg initialization, the `try:` opening this except, and
# the final `return rejmsg` are elided from this excerpt.
241 for f in files.keys():
243 file_handle = open_file(f)
244 except CantOpenError:
245 rejmsg.append("Could not open file %s for checksumming" % (f))
# hashfunc is one of the apt_pkg checksum functions from known_hashes; it
# consumes the open file handle.
247 files[f][hash_key(hashname)] = hashfunc(file_handle)
252 ################################################################################
254 def check_hash(where, files, hashname, hashfunc):
255 """check_hash checks the given hash in the files dict against the actual
256 files on disk. The hash values need to be present consistently in
257 all file entries. It does not modify its input in any way."""
# NOTE(review): rejmsg initialization, the enclosing try, `continue`
# statements, file_handle.close(), and the final return are elided.
260 for f in files.keys():
264 file_handle = open_file(f)
266 # Check for the hash entry, to not trigger a KeyError.
267 if not files[f].has_key(hash_key(hashname)):
268 rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
272 # Actually check the hash for correctness.
273 if hashfunc(file_handle) != files[f][hash_key(hashname)]:
274 rejmsg.append("%s: %s check failed in %s" % (f, hashname,
276 except CantOpenError:
277 # TODO: This happens when the file is in the pool.
278 # warn("Cannot open file %s" % f)
285 ################################################################################
287 def check_size(where, files):
288 """check_size checks the file sizes in the passed files dict against the
# NOTE(review): rejmsg init, the os.stat() call producing `entry`, its
# error handling, and the final return are elided from this excerpt.
292 for f in files.keys():
297 # TODO: This happens when the file is in the pool.
# entry is an os.stat() result; ST_SIZE indexes the on-disk byte count.
301 actual_size = entry[stat.ST_SIZE]
302 size = int(files[f]["size"])
303 if size != actual_size:
304 rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
305 % (f, actual_size, size, where))
308 ################################################################################
310 def check_hash_fields(what, manifest):
311 """check_hash_fields ensures that there are no checksum fields in the
312 given dict that we do not know about."""
# Known hash names are the first element of each known_hashes tuple.
# NOTE(review): rejmsg initialization and the final return are elided.
315 hashes = map(lambda x: x[0], known_hashes)
316 for field in manifest:
317 if field.startswith("checksums-"):
318 hashname = field.split("-",1)[1]
319 if hashname not in hashes:
320 rejmsg.append("Unsupported checksum field for %s "\
321 "in %s" % (hashname, what))
324 ################################################################################
# Ensure one hash from known_hashes is present/correct in a .changes.
# `format` is the parsed Format tuple; `version` is the first Format in
# which this hash is mandatory (from known_hashes).
326 def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
327 if format >= version:
328 # The version should contain the specified hash.
331 # Import hashes from the changes
332 rejmsg = parse_checksums(".changes", files, changes, hashname)
336 # We need to calculate the hash because it can't possibly
# NOTE(review): the assignment choosing `func` (presumably check_hash vs
# create_hash depending on the format branch) is elided -- confirm.
339 return func(".changes", files, hashname, hashfunc)
341 # We could add the orig which might be in the pool to the files dict to
342 # access the checksums easily.
344 def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
345 """ensure_dsc_hashes' task is to ensure that each and every *present* hash
346 in the dsc is correct, i.e. identical to the changes file and if necessary
347 the pool. The latter task is delegated to check_hash."""
# NOTE(review): rejmsg initialization and the final return are elided, as
# is the early-return branch when the Checksums field is absent.
350 if not dsc.has_key('Checksums-%s' % (hashname,)):
352 # Import hashes from the dsc
353 parse_checksums(".dsc", dsc_files, dsc, hashname)
355 rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
358 ################################################################################
# Validate all known hashes for a .changes (and its .dsc when the upload
# includes source), accumulating reject messages.
# NOTE(review): rejmsg initialization, the try/except around the Format
# parsing, continuation lines of the _ensure_* calls, and the final return
# are elided from this excerpt.
360 def ensure_hashes(changes, dsc, files, dsc_files):
363 # Make sure we recognise the format of the Files: field in the .changes
364 format = changes.get("format", "0.0").split(".", 1)
366 format = int(format[0]), int(format[1])
368 format = int(float(format[0])), 0
370 # We need to deal with the original changes blob, as the fields we need
371 # might not be in the changes dict serialised into the .dak anymore.
372 orig_changes = parse_deb822(changes['filecontents'])
374 # Copy the checksums over to the current changes dict. This will keep
375 # the existing modifications to it intact.
376 for field in orig_changes:
377 if field.startswith('checksums-'):
378 changes[field] = orig_changes[field]
380 # Check for unsupported hashes
381 rejmsg.extend(check_hash_fields(".changes", changes))
382 rejmsg.extend(check_hash_fields(".dsc", dsc))
384 # We have to calculate the hash if we have an earlier changes version than
385 # the hash appears in rather than require it exist in the changes file
386 for hashname, hashfunc, version in known_hashes:
387 rejmsg.extend(_ensure_changes_hash(changes, format, version, files,
389 if "source" in changes["architecture"]:
390 rejmsg.extend(_ensure_dsc_hash(dsc, dsc_files, hashname,
# Parse a "Checksums-<hash>" field ("hash size filename" per line) into the
# files dict, cross-checking sizes, and reject files lacking an entry.
# NOTE(review): rejmsg initialization, blank-line `continue`, and the final
# return are elided from this excerpt.
395 def parse_checksums(where, files, manifest, hashname):
397 field = 'checksums-%s' % hashname
398 if not field in manifest:
400 input = manifest[field]
401 for line in input.split('\n'):
404 hash, size, file = line.strip().split(' ')
405 if not files.has_key(file):
406 # TODO: check for the file's entry in the original files dict, not
407 # the one modified by (auto)byhand and other weird stuff
408 # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
409 # (file, hashname, where))
# NOTE(review): both sides here are strings, so this compares "123" == "123"
# textually rather than numerically -- works only if formatting matches.
411 if not files[file]["size"] == size:
412 rejmsg.append("%s: size differs for files and checksums-%s entry "\
413 "in %s" % (file, hashname, where))
415 files[file][hash_key(hashname)] = hash
416 for f in files.keys():
417 if not files[f].has_key(hash_key(hashname)):
# NOTE(review): BUG -- this message interpolates `file` (the stale loop
# variable from the parsing loop above) instead of `f`, so the wrong
# filename is reported.  Left untouched here because surrounding lines are
# elided; fix as "% (f," against the full source.
418 rejmsg.append("%s: no entry in checksums-%s in %s" % (file,
422 ################################################################################
424 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
# Build a files dict from a Files:/Checksums-*: field of a parsed
# .changes/.dsc.  Each entry carries size/section/priority/component plus
# the per-line checksum under `hashname`.
# NOTE(review): the `files = {}` init, the blank-line continue, the
# split() producing `s`, the dsc-format branches, and the final return are
# elided from this excerpt.
426 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
429 # Make sure we have a Files: field to parse...
430 if not changes.has_key(field):
431 raise NoFilesFieldError
433 # Make sure we recognise the format of the Files: field
434 format = re_verwithext.search(changes.get("format", "0.0"))
436 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
438 format = format.groups()
439 if format[1] == None:
440 format = int(float(format[0])), 0, format[2]
442 format = int(format[0]), int(format[1]), format[2]
443 if format[2] == None:
447 # format = (1,0) are the only formats we currently accept,
448 # format = (0,0) are missing format headers of which we still
449 # have some in the archive.
450 if format != (1,0) and format != (0,0):
451 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
# The (1,5)..(1,8) window appears to belong to an elided else-branch
# (suite-qualified formats) -- confirm against the full source.
453 if (format < (1,5) or format > (1,8)):
454 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
455 if field != "files" and format < (1,8):
456 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
# Only a .changes "Files" field carries section/priority columns.
458 includes_section = (not is_a_dsc) and field == "files"
460 # Parse each entry/line:
461 for i in changes[field].split('\n'):
465 section = priority = ""
468 (md5, size, section, priority, name) = s
470 (md5, size, name) = s
472 raise ParseChangesError, i
479 (section, component) = extract_component_from_section(section)
481 files[name] = Dict(size=size, section=section,
482 priority=priority, component=component)
483 files[name][hashname] = md5
487 ################################################################################
489 def force_to_utf8(s):
490 """Forces a string to UTF-8. If the string isn't already UTF-8,
491 it's assumed to be ISO-8859-1."""
# NOTE(review): the try/except attempting a UTF-8 decode first (elided
# lines 492-495) is missing from this excerpt; only the latin-1 fallback
# is visible.
496 latin1_s = unicode(s,'iso8859-1')
497 return latin1_s.encode('utf-8')
499 def rfc2047_encode(s):
500 """Encodes a (header) string per RFC2047 if necessary. If the
501 string is neither ASCII nor UTF-8, it's assumed to be ISO-8859-1."""
# codecs.lookup(...)[1] is the codec's decoder function, used here purely
# as a validity probe inside elided try/except blocks: plain ASCII passes
# through, valid UTF-8 is RFC2047-encoded as utf-8, anything else as
# iso-8859-1.  NOTE(review): the try/except framing and the returns/encode
# calls are elided from this excerpt.
503 codecs.lookup('ascii')[1](s)
508 codecs.lookup('utf-8')[1](s)
509 h = email.Header.Header(s, 'utf-8', 998)
512 h = email.Header.Header(s, 'iso-8859-1', 998)
515 ################################################################################
517 # <Culus> 'The standard sucks, but my tool is supposed to interoperate
518 # with it. I know - I'll fix the suckage and make things
521 def fix_maintainer (maintainer):
522 """Parses a Maintainer or Changed-By field and returns:
523 (1) an RFC822 compatible version,
524 (2) an RFC2047 compatible version,
528 The name is forced to UTF-8 for both (1) and (3). If the name field
529 contains '.' or ',' (as allowed by Debian policy), (1) and (2) are
530 switched to 'email (name)' format."""
531 maintainer = maintainer.strip()
# Empty input: all four components empty.
533 return ('', '', '', '')
# NOTE(review): the branches assigning name/email for the "no angle
# brackets" and "<email> only" cases are partially elided, as is the
# unpacking of the re_parse_maintainer groups.
535 if maintainer.find("<") == -1:
538 elif (maintainer[0] == "<" and maintainer[-1:] == ">"):
539 email = maintainer[1:-1]
542 m = re_parse_maintainer.match(maintainer)
544 raise ParseMaintError, "Doesn't parse as a valid Maintainer field."
548 # Get an RFC2047 compliant version of the name
549 rfc2047_name = rfc2047_encode(name)
551 # Force the name to be UTF-8
552 name = force_to_utf8(name)
# Per the docstring: names containing ',' or '.' switch to
# "email (name)" form so the RFC822 rendering stays parseable.
554 if name.find(',') != -1 or name.find('.') != -1:
555 rfc822_maint = "%s (%s)" % (email, name)
556 rfc2047_maint = "%s (%s)" % (email, rfc2047_name)
558 rfc822_maint = "%s <%s>" % (name, email)
559 rfc2047_maint = "%s <%s>" % (rfc2047_name, email)
# buildd_* pseudo-addresses are exempt from the '@' sanity check.
561 if email.find("@") == -1 and email.find("buildd_") != 0:
562 raise ParseMaintError, "No @ found in email address part."
564 return (rfc822_maint, rfc2047_maint, name, email)
566 ################################################################################
568 # sendmail wrapper, takes _either_ a message string or a file as arguments
569 def send_mail (message, filename=""):
570 # If we've been passed a string dump it into a temporary file
# mktemp() alone is racy, but the O_EXCL open below makes the create fail
# rather than follow a pre-existing path.
572 filename = tempfile.mktemp()
573 fd = os.open(filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0700)
574 os.write (fd, message)
# NOTE(review): the os.close(fd) and the surrounding `if message:` framing
# are elided from this excerpt.
578 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
580 raise SendmailFailedError, output
582 # Clean up any temporary files
586 ################################################################################
# Compute the pool sub-path for a source package: "lib*" packages hash on
# their first four characters, everything else on the first one.
# NOTE(review): lines normalising `component` (appending '/' when
# non-empty) are elided from this excerpt.
588 def poolify (source, component):
591 if source[:3] == "lib":
592 return component + source[:4] + '/' + source + '/'
594 return component + source[:1] + '/' + source + '/'
596 ################################################################################
# Move src to dest, creating the destination directory (setgid 02775) if
# needed; refuses to clobber existing files unless `overwrite` is set.
# NOTE(review): the os.unlink(src) completing the move, the umask restore,
# and the `if not overwrite:` framing around the fubar calls are elided.
598 def move (src, dest, overwrite = 0, perms = 0664):
599 if os.path.exists(dest) and os.path.isdir(dest):
602 dest_dir = os.path.dirname(dest)
603 if not os.path.exists(dest_dir):
# Temporarily clear the umask so makedirs gets the exact 02775 mode.
604 umask = os.umask(00000)
605 os.makedirs(dest_dir, 02775)
607 #print "Moving %s to %s..." % (src, dest)
608 if os.path.exists(dest) and os.path.isdir(dest):
609 dest += '/' + os.path.basename(src)
610 # Don't overwrite unless forced to
611 if os.path.exists(dest):
613 fubar("Can't move %s to %s - file already exists." % (src, dest))
615 if not os.access(dest, os.W_OK):
616 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
617 shutil.copy2(src, dest)
618 os.chmod(dest, perms)
# Copy src to dest; same shape as move() above but raises exceptions
# (FileExistsError / CantOverwriteError) instead of calling fubar, and
# leaves src in place.
# NOTE(review): the umask restore and the `if not overwrite:` framing are
# elided from this excerpt.
621 def copy (src, dest, overwrite = 0, perms = 0664):
622 if os.path.exists(dest) and os.path.isdir(dest):
625 dest_dir = os.path.dirname(dest)
626 if not os.path.exists(dest_dir):
627 umask = os.umask(00000)
628 os.makedirs(dest_dir, 02775)
630 #print "Copying %s to %s..." % (src, dest)
631 if os.path.exists(dest) and os.path.isdir(dest):
632 dest += '/' + os.path.basename(src)
633 # Don't overwrite unless forced to
634 if os.path.exists(dest):
636 raise FileExistsError
638 if not os.access(dest, os.W_OK):
639 raise CantOverwriteError
640 shutil.copy2(src, dest)
641 os.chmod(dest, perms)
643 ################################################################################
# Body of a host-identity helper (its def line is elided from this
# excerpt): resolves the local hostname and returns the host-specific
# DatabaseHostname from Cnf when configured; the fallback return (elided)
# presumably yields the bare hostname -- confirm against the full source.
646 res = socket.gethostbyaddr(socket.gethostname())
647 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
648 if database_hostname:
649 return database_hostname
# Return the host-specific dak config path from Cnf, falling back to
# default_config.  (Line 657 is elided from this excerpt.)
653 def which_conf_file ():
654 res = socket.gethostbyaddr(socket.gethostname())
655 if Cnf.get("Config::" + res[0] + "::DakConfig"):
656 return Cnf["Config::" + res[0] + "::DakConfig"]
658 return default_config
# Same lookup as which_conf_file, but for the apt configuration, falling
# back to default_apt_config.  (Line 664 is elided from this excerpt.)
660 def which_apt_conf_file ():
661 res = socket.gethostbyaddr(socket.gethostname())
662 if Cnf.get("Config::" + res[0] + "::AptConfig"):
663 return Cnf["Config::" + res[0] + "::AptConfig"]
665 return default_apt_config
# Locate the per-host forward-alias file if present.
# NOTE(review): the return statements (aliasfn when it exists, presumably
# None otherwise) are elided from this excerpt.
667 def which_alias_file():
668 hostname = socket.gethostbyaddr(socket.gethostname())[0]
669 aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
670 if os.path.exists(aliasfn):
675 ################################################################################
677 # Escape characters which have meaning to SQL's regex comparison operator ('~')
678 # (woefully incomplete)
# Body of the escaping helper (its def line and further replaces/return
# are elided).  Double-doubled backslashes survive both Python and the
# SQL string layer.
681 s = s.replace('+', '\\\\+')
682 s = s.replace('.', '\\\\.')
685 ################################################################################
687 # Perform a substition of template
# Reads `filename` and substitutes each key of `map` found in the text
# with its value.  NOTE(review): the loop over map's keys binding `x` and
# the final return are elided; `map` and `file` shadow builtins (legacy
# naming kept as-is).
688 def TemplateSubst(map, filename):
689 file = open_file(filename)
690 template = file.read()
692 template = template.replace(x,map[x])
696 ################################################################################
# Print an error to stderr and terminate with exit_code (the sys.exit call
# on elided line 700 is missing from this excerpt).
698 def fubar(msg, exit_code=1):
699 sys.stderr.write("E: %s\n" % (msg))
# Line 703 below is the body of warn(msg); its def line (702) is elided.
703 sys.stderr.write("W: %s\n" % (msg))
705 ################################################################################
707 # Returns the user name with a laughable attempt at rfc822 conformancy
708 # (read: removing stray periods).
# Body of whoami() -- its def line (709) is elided.  Index 4 of the pwd
# entry is the GECOS field; the first comma-separated part is the full
# name.
710 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
712 ################################################################################
# Tail of a size-formatting helper (def line and the unit-selection logic
# choosing `c`/`t` are elided) -- renders e.g. "10KB"-style strings;
# confirm the exact units against the full source.
722 return ("%d%s" % (c, t))
724 ################################################################################
# Convert the space-separated "architecture" string of a parsed changes
# dict into a dict of arch -> 1 for O(1) membership tests.
# NOTE(review): the `for j in o.split():` loop header (line 731) is elided.
726 def cc_fix_changes (changes):
727 o = changes.get("architecture", "")
729 del changes["architecture"]
730 changes["architecture"] = {}
732 changes["architecture"][j] = 1
734 # Sort by source name, source version, 'have source', and then by filename
# cmp()-style comparator for .changes filenames; parse failures and the
# early `return q` / final filename fallback are in elided lines.
735 def changes_compare (a, b):
737 a_changes = parse_changes(a)
742 b_changes = parse_changes(b)
# Normalise "architecture" into dict form so .get("source") works below.
746 cc_fix_changes (a_changes)
747 cc_fix_changes (b_changes)
749 # Sort by source name
750 a_source = a_changes.get("source")
751 b_source = b_changes.get("source")
752 q = cmp (a_source, b_source)
756 # Sort by source version
757 a_version = a_changes.get("version", "0")
758 b_version = b_changes.get("version", "0")
# Debian version semantics (epochs, ~, etc.), not plain string compare.
759 q = apt_pkg.VersionCompare(a_version, b_version)
763 # Sort by 'have source'
764 a_has_source = a_changes["architecture"].get("source")
765 b_has_source = b_changes["architecture"].get("source")
766 if a_has_source and not b_has_source:
768 elif b_has_source and not a_has_source:
771 # Fall back to sort by filename
774 ################################################################################
# Find a non-existing variant of `dest` by appending ".1", ".2", ... up to
# `too_many` attempts; raises NoFreeFilenameError when exhausted.
# NOTE(review): the counter init, `orig_dest` capture, the increment, and
# the final return are elided from this excerpt.
776 def find_next_free (dest, too_many=100):
779 while os.path.exists(dest) and extra < too_many:
780 dest = orig_dest + '.' + repr(extra)
782 if extra >= too_many:
783 raise NoFreeFilenameError
786 ################################################################################
# Join a sequence with `sep`, rendering None entries as empty strings
# (the None branch appending "" and the `list = []` init are elided).
# `list` shadows the builtin -- legacy naming kept as-is.
788 def result_join (original, sep = '\t'):
790 for i in xrange(len(original)):
791 if original[i] == None:
794 list.append(original[i])
795 return sep.join(list)
797 ################################################################################
# Prefix every line of `str` with `prefix`; blank lines are dropped unless
# include_blank_lines is set.  NOTE(review): the `out = ""` init, the
# strip() of each line, and the final return (after removing the trailing
# newline) are elided.  `str` shadows the builtin -- legacy naming.
799 def prefix_multi_line_string(str, prefix, include_blank_lines=0):
801 for line in str.split('\n'):
803 if line or include_blank_lines:
804 out += "%s%s\n" % (prefix, line)
805 # Strip trailing new line
810 ################################################################################
812 def validate_changes_file_arg(filename, require_changes=1):
813 """'filename' is either a .changes or .dak file. If 'filename' is a
814 .dak file, it's changed to be the corresponding .changes file. The
815 function then checks if the .changes file a) exists and b) is
816 readable and returns the .changes filename if so. If there's a
817 problem, the next action depends on the option 'require_changes'
820 o If 'require_changes' == -1, errors are ignored and the .changes
821 filename is returned.
822 o If 'require_changes' == 0, a warning is given and 'None' is returned.
823 o If 'require_changes' == 1, a fatal error is raised.
827 orig_filename = filename
828 if filename.endswith(".dak"):
829 filename = filename[:-4]+".changes"
831 if not filename.endswith(".changes"):
832 error = "invalid file type; not a changes file"
834 if not os.access(filename,os.R_OK):
835 if os.path.exists(filename):
836 error = "permission denied"
838 error = "file not found"
# Dispatch on require_changes as documented above; the `if error:`
# framing and the success-path returns are in elided lines.
841 if require_changes == 1:
842 fubar("%s: %s." % (orig_filename, error))
843 elif require_changes == 0:
844 warn("Skipping %s - %s" % (orig_filename, error))
846 else: # We only care about the .dak file
851 ################################################################################
# Body of an architecture predicate (its def line, 853, is elided):
# true for any concrete build architecture, i.e. not "source"/"all".
854 return (arch != "source" and arch != "all")
856 ################################################################################
def join_with_commas_and(list):
    """Render a list as English prose: "nothing", "a", "a and b",
    "a, b and c"."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    return "%s and %s" % (", ".join(list[:-1]), list[-1])
863 ################################################################################
# Interior of a dependency pretty-printer (def line and loop header
# elided): formats each (pkg, version, constraint) atom and joins the
# alternatives with " |".  The unconstrained-atom branch is also elided.
868 (pkg, version, constraint) = atom
870 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
873 pp_deps.append(pp_dep)
874 return " |".join(pp_deps)
876 ################################################################################
881 ################################################################################
883 # Handle -a, -c and -s arguments; returns them as SQL constraints
# Translates --suite/--component/--architecture options into "AND ... IN"
# SQL fragments via ID lookups in the `database` module.  NOTE(review):
# the `if Options["Suite"]:` framing, list inits, `continue` after each
# warn, the else-branches clearing constraints, and check_source handling
# are elided from this excerpt.  IDs come from trusted config lookups, so
# the string interpolation into SQL is over integers only.
884 def parse_args(Options):
888 for suite in split_args(Options["Suite"]):
889 suite_id = database.get_suite_id(suite)
891 warn("suite '%s' not recognised." % (suite))
893 suite_ids_list.append(suite_id)
895 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
897 fubar("No valid suite given.")
902 if Options["Component"]:
903 component_ids_list = []
904 for component in split_args(Options["Component"]):
905 component_id = database.get_component_id(component)
906 if component_id == -1:
907 warn("component '%s' not recognised." % (component))
909 component_ids_list.append(component_id)
910 if component_ids_list:
911 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
913 fubar("No valid component given.")
917 # Process architecture
918 con_architectures = ""
919 if Options["Architecture"]:
922 for architecture in split_args(Options["Architecture"]):
# "source" is handled out-of-band (check_source flag, set in elided code).
923 if architecture == "source":
926 architecture_id = database.get_architecture_id(architecture)
927 if architecture_id == -1:
928 warn("architecture '%s' not recognised." % (architecture))
930 arch_ids_list.append(architecture_id)
932 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
935 fubar("No valid architecture given.")
939 return (con_suites, con_architectures, con_components, check_source)
941 ################################################################################
943 # Inspired(tm) by Bryn Keller's print_exc_plus (See
944 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
# Interior of the extended traceback printer (def line elided): after the
# normal traceback, walk the frames and dump each frame's locals; values
# that fail to stringify print "<unable to print>".  The frame-walking
# loop and the try/except around printing are in elided lines.
947 tb = sys.exc_info()[2]
956 traceback.print_exc()
958 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
959 frame.f_code.co_filename,
961 for key, value in frame.f_locals.items():
962 print "\t%20s = " % key,
966 print "<unable to print>"
968 ################################################################################
# Run `function` with the extended exception reporting above; the entire
# body (lines 971-977) is elided from this excerpt -- presumably a
# try/except that invokes the frame-dumping printer on failure; confirm
# against the full source.
970 def try_with_debug(function):
978 ################################################################################
980 # Function for use in sorting lists of architectures.
981 # Sorts normally except that 'source' dominates all others.
# cmp()-style comparator; the branches ordering "source" first and the
# final cmp(a, b) fallback are in elided lines.
983 def arch_compare_sw (a, b):
984 if a == "source" and b == "source":
993 ################################################################################
995 # Split command line arguments which can be separated by either commas
996 # or whitespace. If dwim is set, it will complain about string ending
997 # in comma since this usually means someone did 'dak ls -a i386, m68k
998 # foo' or something and the inevitable confusion resulting from 'm68k'
999 # being treated as an argument is undesirable.
# NOTE(review): both return statements (whitespace split / comma split)
# are elided from this excerpt.
1001 def split_args (s, dwim=1):
1002 if s.find(",") == -1:
1005 if s[-1:] == "," and dwim:
1006 fubar("split_args: found trailing comma, spurious space maybe?")
1009 ################################################################################
def Dict(**kwargs):
    """Build a dict from keyword arguments (legacy helper predating dict(**kw))."""
    return kwargs
1013 ########################################
1015 # Our very own version of commands.getouputstatus(), hacked to support
# Run `cmd` under /bin/sh while keeping gpgv's --status-fd pipe
# (status_write) open in the child; returns (stdout+stderr text, status-fd
# text, waitpid exit status).  NOTE(review): the fork(), the child/parent
# branching, fd close/dup bookkeeping, the select loop's framing, and the
# accumulation into output/status are elided from this excerpt.
1017 def gpgv_get_status_output(cmd, status_read, status_write):
1018 cmd = ['/bin/sh', '-c', cmd]
1019 p2cread, p2cwrite = os.pipe()
1020 c2pread, c2pwrite = os.pipe()
1021 errout, errin = os.pipe()
# Child: close every inherited fd except the status pipe before exec.
1031 for i in range(3, 256):
1032 if i != status_write:
1038 os.execvp(cmd[0], cmd)
1044 os.dup2(c2pread, c2pwrite)
1045 os.dup2(errout, errin)
1047 output = status = ""
1049 i, o, e = select.select([c2pwrite, errin, status_read], [], [])
# NOTE(review): 8196 looks like a typo for 8192; harmless (just an odd
# read size) so left as-is.
1052 r = os.read(fd, 8196)
1054 more_data.append(fd)
1055 if fd == c2pwrite or fd == errin:
1057 elif fd == status_read:
1060 fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
1062 pid, exit_status = os.waitpid(pid, 0)
1064 os.close(status_write)
1065 os.close(status_read)
1075 return output, status, exit_status
1077 ################################################################################
# Parse gpgv --status-fd output into {keyword: args} plus an accumulated
# internal_error string ("" when the output was well-formed).
# NOTE(review): the dict/string inits, blank-line continue, `continue`
# after each error, and the `args = split[2:]` assignment are elided.
1079 def process_gpgv_output(status):
1080 # Process the status-fd output
1083 for line in status.split('\n'):
1087 split = line.split()
1089 internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
1091 (gnupg, keyword) = split[:2]
1092 if gnupg != "[GNUPG:]":
1093 internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
# These keywords may legitimately repeat (once per signature/key checked).
1096 if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
1097 internal_error += "found duplicate status token ('%s').\n" % (keyword)
1100 keywords[keyword] = args
1102 return (keywords, internal_error)
1104 ################################################################################
1106 def retrieve_key (filename, keyserver=None, keyring=None):
1107 """Retrieve the key that signed 'filename' from 'keyserver' and
1108 add it to 'keyring'. Returns nothing on success, or an error message
1111 # Defaults for keyserver and keyring
# NOTE(review): the `if not keyserver:` / `if not keyring:` guards around
# these defaults, plus several returns (internal_error check framing, the
# success return), are elided from this excerpt.
1113 keyserver = Cnf["Dinstall::KeyServer"]
1115 keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
1117 # Ensure the filename contains no shell meta-characters or other badness
# This check is what makes the string-built commands below safe to run
# through the shell.
1118 if not re_taint_free.match(filename):
1119 return "%s: tainted filename" % (filename)
1121 # Invoke gpgv on the file
1122 status_read, status_write = os.pipe();
# /dev/null keyring: we expect verification to fail with NO_PUBKEY, which
# tells us which fingerprint to fetch.
1123 cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1124 (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1126 # Process the status-fd output
1127 (keywords, internal_error) = process_gpgv_output(status)
1129 return internal_error
1131 if not keywords.has_key("NO_PUBKEY"):
1132 return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1134 fingerprint = keywords["NO_PUBKEY"][0]
1135 # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
1136 # it'll try to create a lockfile in /dev. A better solution might
1137 # be a tempfile or something.
1138 cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1139 % (Cnf["Dinstall::SigningKeyring"])
1140 cmd += " --keyring %s --keyserver %s --recv-key %s" \
1141 % (keyring, keyserver, fingerprint)
1142 (result, output) = commands.getstatusoutput(cmd)
1144 return "'%s' failed with exit code %s" % (cmd, result)
1148 ################################################################################
# Render the configured (or supplied) keyrings as a "--keyring X ..."
# argument string for gpg/gpgv.  NOTE(review): the `if not keyrings:`
# guard around the Cnf default (elided line 1151) is missing from this
# excerpt.
1150 def gpg_keyring_args(keyrings=None):
1152 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1154 return " ".join(["--keyring %s" % x for x in keyrings])
1156 ################################################################################
def check_signature (sig_filename, reject, data_filename="", keyrings=None, autofetch=None):
    """Check the signature of a file and return the fingerprint if the
    signature is valid or 'None' if it's not.  The first argument is the
    filename whose signature should be checked.  The second argument is a
    reject function and is called when an error is found.  The reject()
    function must allow for two arguments: the first is the error message,
    the second is an optional prefix string.  It's possible for reject()
    to be called more than once during an invocation of check_signature().
    The third argument is optional and is the name of the files the
    detached signature applies to.  The fourth argument is optional and is
    a *list* of keyrings to use.  'autofetch' can either be None, True or
    False.  If None, the default behaviour specified in the config will be
    used."""

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(sig_filename):
        reject("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
        return None

    if data_filename and not re_taint_free.match(data_filename):
        reject("!!WARNING!! tainted data filename: '%s'." % (data_filename))
        return None

    if not keyrings:
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    # Autofetch the signing key if that's enabled
    if autofetch == None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    if autofetch:
        error_msg = retrieve_key(sig_filename)
        if error_msg:
            reject(error_msg)
            return None

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    if internal_error:
        reject("internal error while performing signature check on %s." % (sig_filename))
        reject(internal_error, "")
        reject("Please report the above errors to the Archive maintainers by replying to this mail.", "")
        return None

    bad = ""
    # Now check for obviously bad things in the processed output
    if "KEYREVOKED" in keywords:
        reject("The key used to sign %s has been revoked." % (sig_filename))
        bad = 1
    if "BADSIG" in keywords:
        reject("bad signature on %s." % (sig_filename))
        bad = 1
    if "ERRSIG" in keywords and "NO_PUBKEY" not in keywords:
        reject("failed to check signature on %s." % (sig_filename))
        bad = 1
    if "NO_PUBKEY" in keywords:
        args = keywords["NO_PUBKEY"]
        key = "UNKNOWN"
        if len(args) >= 1:
            key = args[0]
        reject("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
        bad = 1
    if "BADARMOR" in keywords:
        reject("ASCII armour of signature was corrupt in %s." % (sig_filename))
        bad = 1
    if "NODATA" in keywords:
        reject("no signature found in %s." % (sig_filename))
        bad = 1
    # KEYEXPIRED alone is only fatal when there is no good signature
    if "KEYEXPIRED" in keywords and "GOODSIG" not in keywords:
        args = keywords["KEYEXPIRED"]
        key = "UNKNOWN"
        if len(args) >= 1:
            key = args[0]
        reject("The key (0x%s) used to sign %s has expired." % (key, sig_filename))
        bad = 1

    if bad:
        return None

    # Next check gpgv exited with a zero return code
    if exit_status:
        reject("gpgv failed while checking %s." % (sig_filename))
        if status.strip():
            reject(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
        else:
            reject(prefix_multi_line_string(output, " [GPG output:] "), "")
        return None

    # Sanity check the good stuff we expect
    if "VALIDSIG" not in keywords:
        reject("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
        bad = 1
    else:
        args = keywords["VALIDSIG"]
        if len(args) < 1:
            reject("internal error while checking signature on %s." % (sig_filename))
            bad = 1
        else:
            # First VALIDSIG argument is the primary key fingerprint
            fingerprint = args[0]
    if "GOODSIG" not in keywords:
        reject("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
        bad = 1
    if "SIG_ID" not in keywords:
        reject("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))
        bad = 1

    # Finally ensure there's not something we don't recognise
    known_keywords = frozenset(("VALIDSIG", "SIG_ID", "GOODSIG", "BADSIG", "ERRSIG",
                                "SIGEXPIRED", "KEYREVOKED", "NO_PUBKEY", "BADARMOR",
                                "NODATA", "NOTATION_DATA", "NOTATION_NAME", "KEYEXPIRED"))

    for keyword in keywords.keys():
        if keyword not in known_keywords:
            reject("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
            bad = 1

    if bad:
        return None
    else:
        return fingerprint
1286 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """Retrieve email addresses from gpg key uids for a given fingerprint.

    Results are memoized in the module-level key_uid_email_cache, so gpg
    is invoked at most once per fingerprint.  Returns a set of addresses
    (possibly empty if gpg fails or the key has no uids)."""
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses != None:
        # Cache hit: return the previously extracted addresses.
        return addresses
    addresses = set()
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
           % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    if result == 0:
        for l in output.split('\n'):
            m = re_gpg_uid.match(l)
            if m:
                addresses.add(m.group(1))
    # Cache even an empty result so failing keys aren't re-queried.
    key_uid_email_cache[fingerprint] = addresses
    return addresses
1305 ################################################################################
1307 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """Word-wrap 'paragraph' so no line exceeds 'max_length' characters.

    Every line after the first is prefixed with 'prefix' (useful for
    continuation lines in RFC822-style fields).  A single word longer
    than 'max_length' is placed on a line of its own rather than being
    broken.  Returns the wrapped string (no trailing newline)."""
    s = ""
    line = ""
    for word in paragraph.split():
        word_size = len(word)
        if word_size > max_length:
            # Over-long word: flush the current line, then emit the word
            # on its own line; we never split inside a word.
            if line:
                s += line + '\n' + prefix
            s += word + '\n' + prefix
            line = ""
        elif not line:
            line = word
        elif len(line) + word_size + 1 > max_length:
            # Word doesn't fit (+1 for the joining space): start a new line.
            s += line + '\n' + prefix
            line = word
        else:
            line += ' ' + word
    if line:
        s += line
    return s
1338 ################################################################################
1340 # Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
1341 # Returns fixed 'src'
def clean_symlink (src, dest, root):
    """Relativize the absolute symlink 'src' -> 'dest' with respect to
    'root'; returns the fixed 'src'."""
    # Strip the first occurrence of the archive root from both paths.
    stripped_src = src.replace(root, '', 1)
    stripped_dest_dir = os.path.dirname(dest.replace(root, '', 1))
    # One "../" per directory component of the link's location gets us
    # back up to the root, from where the stripped source is reachable.
    updirs = '../' * len(stripped_dest_dir.split('/'))
    return updirs + stripped_src
1349 ################################################################################
def temp_filename(directory=None, dotprefix=None, perms=0o700):
    """Return a secure and unique filename by pre-creating it.
    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'dotprefix' is non-null, the filename will be prefixed with a '.'."""
    if dotprefix:
        prefix = ".tmp"
    else:
        prefix = "tmp"
    # mkstemp creates the file atomically with O_CREAT|O_EXCL, avoiding
    # the mktemp() name-guessing race and any need to fiddle with the
    # process-wide tempfile.tempdir global.
    (fd, filename) = tempfile.mkstemp(prefix=prefix, dir=directory)
    # mkstemp always creates mode 0600; widen/narrow to the requested perms.
    os.chmod(filename, perms)
    # Only the name is handed back to the caller, so close our descriptor.
    os.close(fd)
    return filename
1372 ################################################################################
1374 # checks if the user part of the email is listed in the alias file
def is_email_alias(email):
    """Checks if the user part of the email is listed in the alias file.

    The alias file is parsed once and memoized in the module-level
    alias_cache set (one entry per alias name, i.e. the part before ':')."""
    global alias_cache
    if alias_cache == None:
        aliasfn = which_alias_file()
        alias_cache = set()
        if aliasfn:
            f = open(aliasfn)
            try:
                for l in f:
                    # /etc/aliases format: "name: target[,target...]"
                    alias_cache.add(l.split(':')[0])
            finally:
                # Don't leak the file handle if parsing blows up.
                f.close()
    uid = email.split('@')[0]
    return uid in alias_cache
1387 ################################################################################
# Module initialisation: build the global apt_pkg configuration object
# and populate it from the default dak configuration file.
Cnf = apt_pkg.newConfiguration()
apt_pkg.ReadConfigFileISC(Cnf,default_config)

# Layer the site/user-specific configuration (if it differs from the
# default) on top of the defaults, so local settings win.
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1397 ################################################################################