2 # vim:set et ts=4 sw=4:
5 # Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
7 ################################################################################
9 # This program is free software; you can redistribute it and/or modify
10 # it under the terms of the GNU General Public License as published by
11 # the Free Software Foundation; either version 2 of the License, or
12 # (at your option) any later version.
14 # This program is distributed in the hope that it will be useful,
15 # but WITHOUT ANY WARRANTY; without even the implied warranty of
16 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 # GNU General Public License for more details.
19 # You should have received a copy of the GNU General Public License
20 # along with this program; if not, write to the Free Software
21 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
23 ################################################################################
25 import codecs, commands, email.Header, os, pwd, re, select, socket, shutil, \
26 sys, tempfile, traceback, stat
30 from dak_exceptions import *
################################################################################

# Precompiled regular expressions used throughout this module.
re_comments = re.compile(r"\#.*")
re_no_epoch = re.compile(r"^\d+\:")
re_no_revision = re.compile(r"-[^-]+$")
re_arch_from_filename = re.compile(r"/binary-[^/]+/")
re_extract_src_version = re.compile(r"(\S+)\s*\((.*)\)")
re_isadeb = re.compile(r"(.+?)_(.+?)_(.+)\.u?deb$")
re_issource = re.compile(r"(.+)_(.+?)\.(orig\.tar\.gz|diff\.gz|tar\.gz|dsc)$")

re_single_line_field = re.compile(r"^(\S*)\s*:\s*(.*)")
re_multi_line_field = re.compile(r"^\s(.*)")
re_taint_free = re.compile(r"^[-+~/\.\w]+$")

re_parse_maintainer = re.compile(r"^\s*(\S.*\S)\s*\<([^\>]+)\>")
re_gpg_uid = re.compile('^uid.*<([^>]*)>')

# Matches "source (version)" as found in e.g. a Source: field.
re_srchasver = re.compile(r"^(\S+)\s+\((\S+)\)$")
re_verwithext = re.compile(r"^(\d+)(?:\.(\d+))(?:\s+\((\S+)\))?$")
# NOTE(review): a second, byte-identical definition of re_srchasver used
# to follow re_verwithext here; the redundant duplicate was dropped.

# Characters that must be escaped in HTML output and their entities.
# NOTE(review): the previous table mapped each character to itself (the
# entity references had evidently been decoded away at some point),
# which made HTML escaping a no-op.  Restored the proper entities.
html_escaping = {'"': '&quot;', '&': '&amp;', '<': '&lt;', '>': '&gt;'}
re_html_escaping = re.compile('|'.join(map(re.escape, html_escaping.keys())))

default_config = "/etc/dak/dak.conf"
default_apt_config = "/etc/dak/apt.conf"
# Cache for key-uid/email lookups (filled in elsewhere in this module).
61 key_uid_email_cache = {}

# Each entry: (hashname, hashing function, earliest .changes Format
# version that carries a Checksums-<hashname> field).
63 # (hashname, function, earliest_changes_version)
64 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
65 ("sha256", apt_pkg.sha256sum, (1, 8))]
67 ################################################################################
# NOTE(review): tail of html_escape(s); the def line is elided from this
# numbered listing.  Substitutes each escapable character with its entity
# from the html_escaping table.
70 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
72 ################################################################################
# open_file: open() wrapper that converts failures into CantOpenError.
# NOTE(review): the try:/except and the final return of the handle are
# elided from this numbered listing.
74 def open_file(filename, mode='r'):
76 f = open(filename, mode)
78 raise CantOpenError, filename
81 ################################################################################
# our_raw_input: write 'prompt' to stdout and read one line from stdin.
# On EOF (^D) it reports a user interrupt on stderr.  The raw_input()
# call and its exception handling are elided from this listing.
83 def our_raw_input(prompt=""):
85 sys.stdout.write(prompt)
91 sys.stderr.write("\nUser interrupt (^D).\n")
94 ################################################################################
96 def extract_component_from_section(section):
# Return (section, component).  A "component/section" value is split on
# the first '/'; otherwise the component comes from configuration.
# NOTE(review): the default assignment and the Cnf-based expansion body
# are elided from this numbered listing.
99 if section.find('/') != -1:
100 component = section.split('/')[0]
102 # Expand default component
104 if Cnf.has_key("Component::%s" % section):
109 return (section, component)
111 ################################################################################
# parse_deb822: parse RFC822-style (deb822) stanza text into a dict of
# lower-cased field name -> value, handling inline PGP signing markers.
# signing_rules: -1 ignore signatures, 0 tolerate them, 1 enforce the
# strict framing documented on parse_changes().
# NOTE(review): this is a sampled listing -- loop increments, counters
# and several else branches are elided, so the comments below are
# necessarily partial.
113 def parse_deb822(contents, signing_rules=0):
117 # Split the lines in the input, keeping the linebreaks.
118 lines = contents.splitlines(True)
121 raise ParseChangesError, "[Empty changes file]"
123 # Reindex by line number so we can easily verify the format of
129 indexed_lines[index] = line[:-1]
133 num_of_lines = len(indexed_lines.keys())
136 while index < num_of_lines:
138 line = indexed_lines[index]
# Strict mode: the signed stanza must be terminated by a blank line and
# then the PGP SIGNATURE header, otherwise the .dsc is invalid.
140 if signing_rules == 1:
142 if index > num_of_lines:
143 raise InvalidDscError, index
144 line = indexed_lines[index]
145 if not line.startswith("-----BEGIN PGP SIGNATURE"):
146 raise InvalidDscError, index
151 if line.startswith("-----BEGIN PGP SIGNATURE"):
153 if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
155 if signing_rules == 1:
# Skip the PGP armor header block, up to the first blank line.
156 while index < num_of_lines and line != "":
158 line = indexed_lines[index]
160 # If we're not inside the signed data, don't process anything
161 if signing_rules >= 0 and not inside_signature:
# "Field: value" single-line fields.
163 slf = re_single_line_field.match(line)
165 field = slf.groups()[0].lower()
166 changes[field] = slf.groups()[1]
170 changes[field] += '\n'
# Continuation lines (leading whitespace) extend the previous field.
172 mlf = re_multi_line_field.match(line)
175 raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
176 if first == 1 and changes[field] != "":
177 changes[field] += '\n'
179 changes[field] += mlf.groups()[0] + '\n'
183 if signing_rules == 1 and inside_signature:
184 raise InvalidDscError, index
# Preserve the raw input so callers can re-parse it later.
186 changes["filecontents"] = "".join(lines)
188 if changes.has_key("source"):
189 # Strip the source version in brackets from the source field,
190 # put it in the "source-version" field instead.
191 srcver = re_srchasver.search(changes["source"])
193 changes["source"] = srcver.group(1)
194 changes["source-version"] = srcver.group(2)
197 raise ParseChangesError, error
201 ################################################################################
# parse_changes: read 'filename' and hand its contents to parse_deb822.
203 def parse_changes(filename, signing_rules=0):
204 """Parses a changes file and returns a dictionary where each field is a
205 key. The mandatory first argument is the filename of the .changes
208 signing_rules is an optional argument:
210 o If signing_rules == -1, no signature is required.
211 o If signing_rules == 0 (the default), a signature is required.
212 o If signing_rules == 1, it turns on the same strict format checking
215 The rules for (signing_rules == 1)-mode are:
217 o The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
218 followed by any PGP header data and must end with a blank line.
220 o The data section must end with a blank line and must be followed by
221 "-----BEGIN PGP SIGNATURE-----".
224 changes_in = open_file(filename)
225 content = changes_in.read()
# NOTE(review): the changes_in.close() call appears to be elided from
# this numbered listing.
227 return parse_deb822(content, signing_rules)
229 ################################################################################
def hash_key(hashname):
    """Return the files-dict key under which the given hash is stored,
    e.g. "md5" -> "md5sum"."""
    return hashname + "sum"
234 ################################################################################
236 def create_hash(where, files, hashname, hashfunc):
237 """create_hash extends the passed files dict with the given hash by
238 iterating over all files on disk and passing them to the hashing
# Returns a list of rejection messages.  The rejmsg initialisation,
# try:, continue and return statements are elided from this listing.
242 for f in files.keys():
244 file_handle = open_file(f)
245 except CantOpenError:
246 rejmsg.append("Could not open file %s for checksumming" % (f))
# Store the digest under e.g. "md5sum"/"sha1sum" (see hash_key()).
248 files[f][hash_key(hashname)] = hashfunc(file_handle)
255 def check_hash(where, files, hashname, hashfunc):
256 """check_hash checks the given hash in the files dict against the actual
257 files on disk. The hash values need to be present consistently in
258 all file entries. It does not modify its input in any way."""
# Returns a list of rejection messages; initialisation, try: framing and
# the return are elided from this listing.
261 for f in files.keys():
265 file_handle = open_file(f)
267 # Check for the hash entry, to not trigger a KeyError.
268 if not files[f].has_key(hash_key(hashname)):
269 rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
273 # Actually check the hash for correctness.
274 if hashfunc(file_handle) != files[f][hash_key(hashname)]:
275 rejmsg.append("%s: %s check failed in %s" % (f, hashname,
277 except CantOpenError:
278 # TODO: This happens when the file is in the pool.
279 # warn("Cannot open file %s" % f)
288 def check_size(where, files):
289 """check_size checks the file sizes in the passed files dict against the
# Returns rejection messages for mismatches.  The os.stat() call, its
# exception handling and the return statement are elided here.
293 for f in files.keys():
298 # TODO: This happens when the file is in the pool.
302 actual_size = entry[stat.ST_SIZE]
303 size = int(files[f]["size"])
304 if size != actual_size:
305 rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
306 % (f, actual_size, size, where))
311 def check_hash_fields(what, manifest):
312 """check_hash_fields ensures that there are no checksum fields in the
313 given dict that we do not know about."""
# Returns a list of rejection messages (init/return elided from listing).
# 'hashes' is the list of supported hash names from known_hashes.
316 hashes = map(lambda x: x[0], known_hashes)
317 for field in manifest:
318 if field.startswith("checksums-"):
319 hashname = field.split("-",1)[1]
320 if hashname not in hashes:
321 rejmsg.append("Unsupported checksum field for %s "\
322 "in %s" % (hashname, what))
325 ################################################################################
327 def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
# If the .changes Format is new enough to carry this hash, import it from
# the Checksums-* field; otherwise recompute it from the files on disk.
# NOTE(review): the else branch and the binding of 'func' (presumably
# create_hash vs check_hash) are elided from this listing.
328 if format >= version:
329 # The version should contain the specified hash.
332 # Import hashes from the changes
333 rejmsg = parse_checksums(".changes", files, changes, hashname)
337 # We need to calculate the hash because it can't possibly
340 return func(".changes", files, hashname, hashfunc)
342 # We could add the orig which might be in the pool to the files dict to
343 # access the checksums easily.
345 def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
346 """ensure_dsc_hashes' task is to ensure that each and every *present* hash
347 in the dsc is correct, i.e. identical to the changes file and if necessary
348 the pool. The latter task is delegated to check_hash."""
# Returns rejection messages; a hash absent from the .dsc is simply
# skipped (the early return for that case is elided from this listing).
351 if not dsc.has_key('Checksums-%s' % (hashname,)):
353 # Import hashes from the dsc
354 parse_checksums(".dsc", dsc_files, dsc, hashname)
356 rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
359 ################################################################################
# ensure_hashes: validate every known hash in both the .changes and the
# .dsc, returning a list of rejection messages.  (rejmsg initialisation,
# try/except around the format parse and the return are elided.)
361 def ensure_hashes(changes, dsc, files, dsc_files):
364 # Make sure we recognise the format of the Files: field in the .changes
365 format = changes.get("format", "0.0").split(".", 1)
367 format = int(format[0]), int(format[1])
369 format = int(float(format[0])), 0
371 # We need to deal with the original changes blob, as the fields we need
372 # might not be in the changes dict serialised into the .dak anymore.
373 orig_changes = parse_deb822(changes['filecontents'])
375 # Copy the checksums over to the current changes dict. This will keep
376 # the existing modifications to it intact.
377 for field in orig_changes:
378 if field.startswith('checksums-'):
379 changes[field] = orig_changes[field]
381 # Check for unsupported hashes
382 rejmsg.extend(check_hash_fields(".changes", changes))
383 rejmsg.extend(check_hash_fields(".dsc", dsc))
385 # We have to calculate the hash if we have an earlier changes version than
386 # the hash appears in rather than require it exist in the changes file
387 for hashname, hashfunc, version in known_hashes:
388 rejmsg.extend(_ensure_changes_hash(changes, format, version, files,
# Source uploads additionally get their .dsc checksums verified.
390 if "source" in changes["architecture"]:
391 rejmsg.extend(_ensure_dsc_hash(dsc, dsc_files, hashname,
396 def parse_checksums(where, files, manifest, hashname):
# Parse a "Checksums-<hash>" field into the files dict and return a list
# of rejection messages.  (rejmsg init, early return, blank-line skip and
# continue statements are elided from this listing.)
398 field = 'checksums-%s' % hashname
399 if not field in manifest:
401 for line in manifest[field].split('\n'):
404 checksum, size, checkfile = line.strip().split(' ')
405 if not files.has_key(checkfile):
406 # TODO: check for the file's entry in the original files dict, not
407 # the one modified by (auto)byhand and other weird stuff
408 # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
409 # (file, hashname, where))
# NOTE(review): 'size' is the raw string from the field, so this is a
# string comparison against files[...]['size'] -- fragile if the latter
# is ever stored as an int.
411 if not files[checkfile]["size"] == size:
412 rejmsg.append("%s: size differs for files and checksums-%s entry "\
413 "in %s" % (checkfile, hashname, where))
415 files[checkfile][hash_key(hashname)] = checksum
416 for f in files.keys():
417 if not files[f].has_key(hash_key(hashname)):
# NOTE(review): 'checkfile' here is left over from the previous loop;
# this message should almost certainly name 'f' instead -- looks like a
# genuine bug worth fixing upstream.
418 rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
422 ################################################################################
424 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
# build_file_list: parse the Files:/Checksums-* field of a .changes or
# .dsc into a dict keyed by filename.  NOTE(review): the files dict
# initialisation, blank-line skips, the split of each entry into 's'
# and the final return are elided; the two format-range checks at
# original lines 450/453 presumably sit on opposite branches of an
# is_a_dsc conditional that is also elided -- confirm against the full
# source.
426 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
429 # Make sure we have a Files: field to parse...
430 if not changes.has_key(field):
431 raise NoFilesFieldError
433 # Make sure we recognise the format of the Files: field
434 format = re_verwithext.search(changes.get("format", "0.0"))
436 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
438 format = format.groups()
439 if format[1] == None:
440 format = int(float(format[0])), 0, format[2]
442 format = int(format[0]), int(format[1]), format[2]
443 if format[2] == None:
447 # format = (1,0) are the only formats we currently accept,
448 # format = (0,0) are missing format headers of which we still
449 # have some in the archive.
450 if format != (1,0) and format != (0,0):
451 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
453 if (format < (1,5) or format > (1,8)):
454 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
455 if field != "files" and format < (1,8):
456 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
# .dsc entries and Checksums-* lines carry no section/priority columns.
458 includes_section = (not is_a_dsc) and field == "files"
460 # Parse each entry/line:
461 for i in changes[field].split('\n'):
465 section = priority = ""
468 (md5, size, section, priority, name) = s
470 (md5, size, name) = s
472 raise ParseChangesError, i
479 (section, component) = extract_component_from_section(section)
481 files[name] = Dict(size=size, section=section,
482 priority=priority, component=component)
483 files[name][hashname] = md5
487 ################################################################################
489 def force_to_utf8(s):
490 """Forces a string to UTF-8. If the string isn't already UTF-8,
491 it's assumed to be ISO-8859-1."""
# NOTE(review): the try: unicode(s, 'utf-8') success path and the except
# framing are elided from this listing; only the latin-1 fallback shows.
496 latin1_s = unicode(s,'iso8859-1')
497 return latin1_s.encode('utf-8')
499 def rfc2047_encode(s):
500 """Encodes a (header) string per RFC2047 if necessary. If the
501 string is neither ASCII nor UTF-8, it's assumed to be ISO-8859-1."""
# Try plain ASCII first, then UTF-8, finally fall back to ISO-8859-1.
# The try/except framing and the intermediate returns are elided from
# this listing; codecs.lookup(...)[1] is the codec's decode function.
503 codecs.lookup('ascii')[1](s)
508 codecs.lookup('utf-8')[1](s)
509 h = email.Header.Header(s, 'utf-8', 998)
512 h = email.Header.Header(s, 'iso-8859-1', 998)
515 ################################################################################
517 # <Culus> 'The standard sucks, but my tool is supposed to interoperate
518 # with it. I know - I'll fix the suckage and make things
521 def fix_maintainer (maintainer):
522 """Parses a Maintainer or Changed-By field and returns:
523 (1) an RFC822 compatible version,
524 (2) an RFC2047 compatible version,
528 The name is forced to UTF-8 for both (1) and (3). If the name field
529 contains '.' or ',' (as allowed by Debian policy), (1) and (2) are
530 switched to 'email (name)' format."""
# NOTE(review): the empty-input guard, the name/email assignments for
# the first two branches, and the regex-group extraction are partially
# elided from this listing.
531 maintainer = maintainer.strip()
533 return ('', '', '', '')
# No '<' at all: the whole string is treated as a bare email address.
535 if maintainer.find("<") == -1:
538 elif (maintainer[0] == "<" and maintainer[-1:] == ">"):
539 email = maintainer[1:-1]
542 m = re_parse_maintainer.match(maintainer)
544 raise ParseMaintError, "Doesn't parse as a valid Maintainer field."
548 # Get an RFC2047 compliant version of the name
549 rfc2047_name = rfc2047_encode(name)
551 # Force the name to be UTF-8
552 name = force_to_utf8(name)
# Names containing ',' or '.' are rendered "email (name)" so the result
# stays RFC822-parseable without quoting.
554 if name.find(',') != -1 or name.find('.') != -1:
555 rfc822_maint = "%s (%s)" % (email, name)
556 rfc2047_maint = "%s (%s)" % (email, rfc2047_name)
558 rfc822_maint = "%s <%s>" % (name, email)
559 rfc2047_maint = "%s <%s>" % (rfc2047_name, email)
# buildd_* pseudo-addresses are exempt from the '@' sanity check.
561 if email.find("@") == -1 and email.find("buildd_") != 0:
562 raise ParseMaintError, "No @ found in email address part."
564 return (rfc822_maint, rfc2047_maint, name, email)
566 ################################################################################
568 # sendmail wrapper, takes _either_ a message string or a file as arguments
569 def send_mail (message, filename=""):
570 # If we've been passed a string dump it into a temporary file
572 (fd, filename) = tempfile.mkstemp()
573 os.write (fd, message)
# Pipe the file into the configured sendmail command; non-zero exit
# raises SendmailFailedError.  (os.close(fd) and the temp-file unlink
# are elided from this listing.)
577 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
579 raise SendmailFailedError, output
581 # Clean up any temporary files
585 ################################################################################
587 def poolify (source, component):
# Map a source package name to its pool sub-directory: "lib*" packages
# pool under their first four characters, everything else under the
# first character.  NOTE(review): the leading handling of 'component'
# (presumably appending a '/') is elided from this listing.
590 if source[:3] == "lib":
591 return component + source[:4] + '/' + source + '/'
593 return component + source[:1] + '/' + source + '/'
595 ################################################################################
# move: copy 'src' to 'dest' (creating the destination directory 02775
# if needed), chmod it to 'perms', then remove the source.
# NOTE(review): the os.umask(umask) restore after makedirs and the final
# os.unlink(src) are elided from this listing.
597 def move (src, dest, overwrite = 0, perms = 0664):
598 if os.path.exists(dest) and os.path.isdir(dest):
601 dest_dir = os.path.dirname(dest)
602 if not os.path.exists(dest_dir):
603 umask = os.umask(00000)
604 os.makedirs(dest_dir, 02775)
606 #print "Moving %s to %s..." % (src, dest)
607 if os.path.exists(dest) and os.path.isdir(dest):
608 dest += '/' + os.path.basename(src)
609 # Don't overwrite unless forced to
610 if os.path.exists(dest):
612 fubar("Can't move %s to %s - file already exists." % (src, dest))
# Even with overwrite requested, refuse if the target isn't writable.
614 if not os.access(dest, os.W_OK):
615 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
616 shutil.copy2(src, dest)
617 os.chmod(dest, perms)
# copy: same shape as move() but leaves the source in place and raises
# FileExistsError / CantOverwriteError instead of calling fubar().
# NOTE(review): the os.umask(umask) restore after makedirs is elided.
620 def copy (src, dest, overwrite = 0, perms = 0664):
621 if os.path.exists(dest) and os.path.isdir(dest):
624 dest_dir = os.path.dirname(dest)
625 if not os.path.exists(dest_dir):
626 umask = os.umask(00000)
627 os.makedirs(dest_dir, 02775)
629 #print "Copying %s to %s..." % (src, dest)
630 if os.path.exists(dest) and os.path.isdir(dest):
631 dest += '/' + os.path.basename(src)
632 # Don't overwrite unless forced to
633 if os.path.exists(dest):
635 raise FileExistsError
637 if not os.access(dest, os.W_OK):
638 raise CantOverwriteError
639 shutil.copy2(src, dest)
640 os.chmod(dest, perms)
642 ################################################################################
# NOTE(review): body of where_am_i(); the def line is elided from this
# listing.  Returns the per-host DatabaseHostname override from Cnf,
# presumably falling back (elided) to the bare hostname.
645 res = socket.gethostbyaddr(socket.gethostname())
646 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
647 if database_hostname:
648 return database_hostname
# Return the per-host DakConfig path if configured, else default_config.
652 def which_conf_file ():
653 res = socket.gethostbyaddr(socket.gethostname())
654 if Cnf.get("Config::" + res[0] + "::DakConfig"):
655 return Cnf["Config::" + res[0] + "::DakConfig"]
657 return default_config
# Return the per-host AptConfig path if configured, else the default.
659 def which_apt_conf_file ():
660 res = socket.gethostbyaddr(socket.gethostname())
661 if Cnf.get("Config::" + res[0] + "::AptConfig"):
662 return Cnf["Config::" + res[0] + "::AptConfig"]
664 return default_apt_config
# Locate this host's forward-alias file under /var/lib/misc, if any.
666 def which_alias_file():
667 hostname = socket.gethostbyaddr(socket.gethostname())[0]
668 aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
669 if os.path.exists(aliasfn):
# NOTE(review): the return of aliasfn and the None fallback are elided
# from this listing.
674 ################################################################################
676 # Escape characters which have meaning to SQL's regex comparison operator ('~')
677 # (woefully incomplete)
# NOTE(review): body of regex_safe(s); the def line and the return are
# elided from this listing.
680 s = s.replace('+', '\\\\+')
681 s = s.replace('.', '\\\\.')
684 ################################################################################
# Perform a substitution of template
# Replaces each key of 'map' found in the template file with its value.
# NOTE(review): the loop header over map's keys, the file close and the
# final return of 'template' are elided from this listing.
687 def TemplateSubst(map, filename):
688 templatefile = open_file(filename)
689 template = templatefile.read()
691 template = template.replace(x,map[x])
695 ################################################################################
# fubar: print a fatal error to stderr; the sys.exit(exit_code) call is
# elided from this listing.
697 def fubar(msg, exit_code=1):
698 sys.stderr.write("E: %s\n" % (msg))
# NOTE(review): the line below is the body of warn(msg); its def line is
# elided from this numbered listing.
702 sys.stderr.write("W: %s\n" % (msg))
704 ################################################################################
706 # Returns the user name with a laughable attempt at rfc822 conformancy
707 # (read: removing stray periods).
# NOTE(review): body of whoami(); the def line is elided.  Uses the
# GECOS full-name field of the current user's passwd entry.
709 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
711 ################################################################################
# NOTE(review): tail of a human-readable size formatter; the count 'c'
# and unit suffix 't' are computed on lines elided from this listing.
721 return ("%d%s" % (c, t))
723 ################################################################################
# Normalise the space-separated "architecture" string of a parsed
# .changes into a dict with one key per architecture (value 1).
# NOTE(review): the loop header splitting 'o' into 'j' is elided.
725 def cc_fix_changes (changes):
726 o = changes.get("architecture", "")
728 del changes["architecture"]
729 changes["architecture"] = {}
731 changes["architecture"][j] = 1
733 # Sort by source name, source version, 'have source', and then by filename
# cmp()-style comparator over two .changes filenames.  NOTE(review):
# the except branches around each parse (falling back to comparing raw
# filenames), the early returns when q != 0, and the final filename
# comparison are elided from this listing.
734 def changes_compare (a, b):
736 a_changes = parse_changes(a)
741 b_changes = parse_changes(b)
745 cc_fix_changes (a_changes)
746 cc_fix_changes (b_changes)
748 # Sort by source name
749 a_source = a_changes.get("source")
750 b_source = b_changes.get("source")
751 q = cmp (a_source, b_source)
755 # Sort by source version
756 a_version = a_changes.get("version", "0")
757 b_version = b_changes.get("version", "0")
758 q = apt_pkg.VersionCompare(a_version, b_version)
762 # Sort by 'have source'
763 a_has_source = a_changes["architecture"].get("source")
764 b_has_source = b_changes["architecture"].get("source")
765 if a_has_source and not b_has_source:
767 elif b_has_source and not a_has_source:
770 # Fall back to sort by filename
773 ################################################################################
# Append ".1", ".2", ... to 'dest' until an unused filename is found;
# raise NoFreeFilenameError after 'too_many' attempts.  NOTE(review):
# the initialisation of 'extra'/'orig_dest', the increment, and the
# final return of dest are elided from this listing.
775 def find_next_free (dest, too_many=100):
778 while os.path.exists(dest) and extra < too_many:
779 dest = orig_dest + '.' + repr(extra)
781 if extra >= too_many:
782 raise NoFreeFilenameError
785 ################################################################################
def result_join(original, sep='\t'):
    """Join the elements of 'original' with 'sep', rendering None
    entries as the empty string (used for tabular query output).
    NOTE(review): reconstructed from a partial listing -- the list
    initialisation and else branch were elided but are unambiguous."""
    rendered = []
    for value in original:
        if value == None:
            rendered.append("")
        else:
            rendered.append(value)
    return sep.join(rendered)
796 ################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    """Prepend 'prefix' to every line of 'str'.  Lines are stripped of
    surrounding whitespace first; blank lines are dropped unless
    'include_blank_lines' is set.  The result carries no trailing
    newline.
    NOTE(review): reconstructed from a partial listing -- the
    accumulator initialisation, the strip and the trailing-newline
    removal were elided but are implied by the visible blank-line check
    and the original "Strip trailing new line" comment."""
    prefixed = []
    for raw_line in str.split('\n'):
        stripped = raw_line.strip()
        if stripped or include_blank_lines:
            prefixed.append("%s%s" % (prefix, stripped))
    # join() leaves no trailing newline, matching the original's
    # explicit strip of the final '\n'.
    return "\n".join(prefixed)
811 def validate_changes_file_arg(filename, require_changes=1):
812 """'filename' is either a .changes or .dak file. If 'filename' is a
813 .dak file, it's changed to be the corresponding .changes file. The
814 function then checks if the .changes file a) exists and b) is
815 readable and returns the .changes filename if so. If there's a
816 problem, the next action depends on the option 'require_changes'
819 o If 'require_changes' == -1, errors are ignored and the .changes
820 filename is returned.
821 o If 'require_changes' == 0, a warning is given and 'None' is returned.
822 o If 'require_changes' == 1, a fatal error is raised.
# NOTE(review): the error initialisation, success return and the
# require_changes == -1 branch body are elided from this listing.
826 orig_filename = filename
827 if filename.endswith(".dak"):
828 filename = filename[:-4]+".changes"
830 if not filename.endswith(".changes"):
831 error = "invalid file type; not a changes file"
833 if not os.access(filename,os.R_OK):
834 if os.path.exists(filename):
835 error = "permission denied"
837 error = "file not found"
840 if require_changes == 1:
841 fubar("%s: %s." % (orig_filename, error))
842 elif require_changes == 0:
843 warn("Skipping %s - %s" % (orig_filename, error))
845 else: # We only care about the .dak file
850 ################################################################################
# NOTE(review): body of real_arch(arch); the def line is elided from
# this listing.  True for any concrete architecture, i.e. anything
# other than "source" and "all".
853 return (arch != "source" and arch != "all")
855 ################################################################################
def join_with_commas_and(list):
    """Render a list as English prose: [] -> "nothing", ["a"] -> "a",
    ["a", "b", "c"] -> "a, b and c"."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    head = ", ".join(list[:-1])
    return "%s and %s" % (head, list[-1])
862 ################################################################################
# NOTE(review): fragment of pp_deps(deps); the def line, the loop header
# over dependency atoms and the unversioned-dependency branch are elided
# from this listing.  Pretty-prints dependency atoms joined with " |".
867 (pkg, version, constraint) = atom
869 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
872 pp_deps.append(pp_dep)
873 return " |".join(pp_deps)
875 ################################################################################
880 ################################################################################
882 # Handle -a, -c and -s arguments; returns them as SQL constraints
# Returns (con_suites, con_architectures, con_components, check_source)
# SQL fragments for embedding in queries.  NOTE(review): the suite-list
# initialisation, several continue/else branches, the check_source flag
# handling and the "Suite"-option guard are elided from this listing.
883 def parse_args(Options):
887 for suite in split_args(Options["Suite"]):
888 suite_id = database.get_suite_id(suite)
890 warn("suite '%s' not recognised." % (suite))
892 suite_ids_list.append(suite_id)
894 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
896 fubar("No valid suite given.")
901 if Options["Component"]:
902 component_ids_list = []
903 for component in split_args(Options["Component"]):
904 component_id = database.get_component_id(component)
905 if component_id == -1:
906 warn("component '%s' not recognised." % (component))
908 component_ids_list.append(component_id)
909 if component_ids_list:
910 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
912 fubar("No valid component given.")
916 # Process architecture
917 con_architectures = ""
918 if Options["Architecture"]:
# "source" is not a database architecture; presumably it sets the
# check_source flag on an elided line -- confirm against full source.
921 for architecture in split_args(Options["Architecture"]):
922 if architecture == "source":
925 architecture_id = database.get_architecture_id(architecture)
926 if architecture_id == -1:
927 warn("architecture '%s' not recognised." % (architecture))
929 arch_ids_list.append(architecture_id)
931 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
934 fubar("No valid architecture given.")
938 return (con_suites, con_architectures, con_components, check_source)
940 ################################################################################
942 # Inspired(tm) by Bryn Keller's print_exc_plus (See
943 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
# NOTE(review): fragment of an extended-traceback printer; the def line,
# the walk down tb_next to collect frames, and the try framing around
# the per-local repr are elided from this numbered listing.
946 tb = sys.exc_info()[2]
955 traceback.print_exc()
957 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
958 frame.f_code.co_filename,
960 for key, value in frame.f_locals.items():
961 print "\t%20s = " % key,
965 print "<unable to print>"
967 ################################################################################
# Run 'function', presumably dumping an extended traceback on failure;
# the entire body is elided from this numbered listing.
969 def try_with_debug(function):
977 ################################################################################
979 # Function for use in sorting lists of architectures.
980 # Sorts normally except that 'source' dominates all others.
982 def arch_compare_sw (a, b):
983 if a == "source" and b == "source":
# NOTE(review): the remaining comparison branches (source-first and the
# plain cmp fallback) are elided from this numbered listing.
992 ################################################################################
994 # Split command line arguments which can be separated by either commas
995 # or whitespace. If dwim is set, it will complain about string ending
996 # in comma since this usually means someone did 'dak ls -a i386, m68k
997 # foo' or something and the inevitable confusion resulting from 'm68k'
998 # being treated as an argument is undesirable.
1000 def split_args (s, dwim=1):
# No comma: split on whitespace (the return is elided from this listing).
1001 if s.find(",") == -1:
1004 if s[-1:] == "," and dwim:
1005 fubar("split_args: found trailing comma, spurious space maybe?")
# Comma-separated: split on ',' (the return is elided from this listing).
1008 ################################################################################
def Dict(**kwargs):
    """Return the keyword arguments as a plain dictionary.

    Convenience constructor used throughout this module, e.g.
    Dict(size=size, section=section).  The parameter was renamed from
    'dict' so it no longer shadows the builtin; callers only ever pass
    keywords, so the interface is unchanged."""
    return kwargs
1012 ########################################
1014 # Our very own version of commands.getoutputstatus(), hacked to support
# Fork/exec 'cmd' under /bin/sh, multiplexing the child's stdout, stderr
# and the gpgv --status-fd pipe via select(); returns the tuple
# (output, status, exit_status).  NOTE(review): the os.fork(), the
# child-process branch, the pipe close bookkeeping, the read-loop
# accumulation and the EOF handling are partially elided from this
# numbered listing.
1016 def gpgv_get_status_output(cmd, status_read, status_write):
1017 cmd = ['/bin/sh', '-c', cmd]
1018 p2cread, p2cwrite = os.pipe()
1019 c2pread, c2pwrite = os.pipe()
1020 errout, errin = os.pipe()
# Child: close every inherited fd except the status pipe before exec.
1030 for i in range(3, 256):
1031 if i != status_write:
1037 os.execvp(cmd[0], cmd)
1043 os.dup2(c2pread, c2pwrite)
1044 os.dup2(errout, errin)
1046 output = status = ""
1048 i, o, e = select.select([c2pwrite, errin, status_read], [], [])
# NOTE(review): 8196 looks like a typo for the conventional 8192; it is
# harmless (just an odd buffer size) but worth confirming upstream.
1051 r = os.read(fd, 8196)
1053 more_data.append(fd)
1054 if fd == c2pwrite or fd == errin:
1056 elif fd == status_read:
1059 fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
1061 pid, exit_status = os.waitpid(pid, 0)
1063 os.close(status_write)
1064 os.close(status_read)
1074 return output, status, exit_status
1076 ################################################################################
# Parse gpgv --status-fd output ("[GNUPG:] KEYWORD args...") into a
# keyword -> args dict.  Returns (keywords, internal_error) where
# internal_error accumulates complaints about malformed lines.
# NOTE(review): the initialisations, blank-line skips and the args
# extraction are elided from this numbered listing.
1078 def process_gpgv_output(status):
1079 # Process the status-fd output
1082 for line in status.split('\n'):
1086 split = line.split()
1088 internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
1090 (gnupg, keyword) = split[:2]
1091 if gnupg != "[GNUPG:]":
1092 internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
# These keywords may legitimately repeat in gpgv output.
1095 if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
1096 internal_error += "found duplicate status token ('%s').\n" % (keyword)
1099 keywords[keyword] = args
1101 return (keywords, internal_error)
1103 ################################################################################
1105 def retrieve_key (filename, keyserver=None, keyring=None):
1106 """Retrieve the key that signed 'filename' from 'keyserver' and
1107 add it to 'keyring'. Returns nothing on success, or an error message
# NOTE(review): the 'if not keyserver:'/'if not keyring:' guards, the
# internal_error check and the success return are elided from this
# numbered listing.
1110 # Defaults for keyserver and keyring
1112 keyserver = Cnf["Dinstall::KeyServer"]
1114 keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
1116 # Ensure the filename contains no shell meta-characters or other badness
1117 if not re_taint_free.match(filename):
1118 return "%s: tainted filename" % (filename)
# Run gpgv against an empty keyring purely to learn the signing key's
# fingerprint from the NO_PUBKEY status line.
1120 # Invoke gpgv on the file
1121 status_read, status_write = os.pipe();
1122 cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1123 (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1125 # Process the status-fd output
1126 (keywords, internal_error) = process_gpgv_output(status)
1128 return internal_error
1130 if not keywords.has_key("NO_PUBKEY"):
1131 return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1133 fingerprint = keywords["NO_PUBKEY"][0]
1134 # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
1135 # it'll try to create a lockfile in /dev. A better solution might
1136 # be a tempfile or something.
1137 cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1138 % (Cnf["Dinstall::SigningKeyring"])
1139 cmd += " --keyring %s --keyserver %s --recv-key %s" \
1140 % (keyring, keyserver, fingerprint)
1141 (result, output) = commands.getstatusoutput(cmd)
1143 return "'%s' failed with exit code %s" % (cmd, result)
# Render the keyrings as "--keyring X" arguments for gpg/gpgv, defaulting
# to the configured Dinstall::GPGKeyring list.  NOTE(review): the
# 'if not keyrings:' guard around the default assignment is elided from
# this numbered listing.
1149 def gpg_keyring_args(keyrings=None):
1151 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1153 return " ".join(["--keyring %s" % x for x in keyrings])
1155 ################################################################################
def check_signature (sig_filename, reject, data_filename="", keyrings=None, autofetch=None):
    """Check the signature of a file and return the fingerprint if the
signature is valid or 'None' if it's not.  The first argument is the
filename whose signature should be checked.  The second argument is a
reject function and is called when an error is found.  The reject()
function must allow for two arguments: the first is the error message,
the second is an optional prefix string.  It's possible for reject()
to be called more than once during an invocation of check_signature().
The third argument is optional and is the name of the files the
detached signature applies to.  The fourth argument is optional and is
a *list* of keyrings to use.  'autofetch' can either be None, True or
False.  If None, the default behaviour specified in the config will be
used."""

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(sig_filename):
        reject("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
        return None

    if data_filename and not re_taint_free.match(data_filename):
        reject("!!WARNING!! tainted data filename: '%s'." % (data_filename))
        return None

    if not keyrings:
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    # Autofetch the signing key if that's enabled
    if autofetch is None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    if autofetch:
        error_msg = retrieve_key(sig_filename)
        if error_msg:
            reject(error_msg)
            return None

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    if internal_error:
        reject("internal error while performing signature check on %s." % (sig_filename))
        reject(internal_error, "")
        reject("Please report the above errors to the Archive maintainers by replying to this mail.", "")
        return None

    bad = ""
    # Now check for obviously bad things in the processed output
    if keywords.has_key("KEYREVOKED"):
        reject("The key used to sign %s has been revoked." % (sig_filename))
        bad = 1
    if keywords.has_key("BADSIG"):
        reject("bad signature on %s." % (sig_filename))
        bad = 1
    if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
        reject("failed to check signature on %s." % (sig_filename))
        bad = 1
    if keywords.has_key("NO_PUBKEY"):
        args = keywords["NO_PUBKEY"]
        key = ""
        if len(args) >= 1:
            key = args[0]
        reject("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
        bad = 1
    if keywords.has_key("BADARMOR"):
        reject("ASCII armour of signature was corrupt in %s." % (sig_filename))
        bad = 1
    if keywords.has_key("NODATA"):
        reject("no signature found in %s." % (sig_filename))
        bad = 1
    if keywords.has_key("EXPKEYSIG"):
        args = keywords["EXPKEYSIG"]
        key = ""
        if len(args) >= 1:
            key = args[0]
        reject("Signature made by expired key 0x%s" % (key))
        bad = 1
    if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
        args = keywords["KEYEXPIRED"]
        expiredate = ""
        if len(args) >= 1:
            timestamp = args[0]
            if timestamp.count("T") == 0:
                # gpgv hands the expiry back as a seconds-since-epoch
                # *string*; time.gmtime() needs a number, so convert.
                expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
            else:
                # Already an ISO8601 timestamp, use it as-is.
                expiredate = timestamp
        reject("The key used to sign %s has expired on %s" % (sig_filename, expiredate))
        bad = 1

    if bad:
        return None

    # Next check gpgv exited with a zero return code
    if exit_status:
        reject("gpgv failed while checking %s." % (sig_filename))
        if status.strip():
            reject(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
        else:
            reject(prefix_multi_line_string(output, " [GPG output:] "), "")
        return None

    # Sanity check the good stuff we expect
    if not keywords.has_key("VALIDSIG"):
        reject("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
        bad = 1
    else:
        args = keywords["VALIDSIG"]
        if len(args) < 1:
            reject("internal error while checking signature on %s." % (sig_filename))
            bad = 1
        else:
            fingerprint = args[0]
    if not keywords.has_key("GOODSIG"):
        reject("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
        bad = 1
    if not keywords.has_key("SIG_ID"):
        reject("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))
        bad = 1

    # Finally ensure there's not something we don't recognise
    known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
                          NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")

    for keyword in keywords.keys():
        if not known_keywords.has_key(keyword):
            reject("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
            bad = 1

    if bad:
        return None
    else:
        return fingerprint
1296 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """Retrieve email addresses from gpg key uids for a given fingerprint.

    Results are memoised in key_uid_email_cache, so gpg is only invoked
    once per fingerprint.  Returns a set of addresses (possibly empty).
    """
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses is not None:
        # Cache hit - avoid forking gpg again.
        return addresses
    addresses = set()
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
           % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    if result == 0:
        for l in output.split('\n'):
            m = re_gpg_uid.match(l)
            if m:
                addresses.add(m.group(1))
    # Cache the (possibly empty) result even on gpg failure so we do not
    # retry a broken fingerprint on every call.
    key_uid_email_cache[fingerprint] = addresses
    return addresses
1315 ################################################################################
1317 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """Word-wrap 'paragraph' to at most 'max_length' characters per line.

    Continuation lines are prefixed with 'prefix'.  A single word longer
    than 'max_length' is placed on a line of its own rather than broken.
    Returns the wrapped text as one string (no trailing newline).
    """
    line = ""
    s = ""
    words = paragraph.split()

    for word in words:
        word_size = len(word)
        if word_size > max_length:
            # Over-long word: flush the pending line, then emit the word
            # on its own line.  Resetting 'line' here fixes a bug where
            # the flushed text was emitted a second time later on.
            if line:
                s += line + '\n' + prefix
                line = ""
            s += word + '\n' + prefix
        elif line:
            new_length = len(line) + word_size + 1
            if new_length > max_length:
                # Word doesn't fit: start a new line with it.
                s += line + '\n' + prefix
                line = word
            else:
                line += ' ' + word
        else:
            # First word on a fresh line.
            line = word

    if line:
        s += line

    return s
1348 ################################################################################
# Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
# Returns fixed 'src'
def clean_symlink (src, dest, root):
    """Return 'src' rewritten as a relative path suitable for a symlink
    located at 'dest', where both paths live under 'root'."""
    # Strip the leading root from both paths (first occurrence only).
    relative_src = src.replace(root, '', 1)
    relative_dest = dest.replace(root, '', 1)
    # One '../' for every directory component of the link's location.
    link_dir = os.path.dirname(relative_dest)
    ups = '../' * len(link_dir.split('/'))
    return ups + relative_src
1359 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """Return a secure and unique filename by pre-creating it.
If 'directory' is non-null, it will be the directory the file is pre-created in.
If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
If 'suffix' is non-null, the filename will end with it.

Returns a pair (fd, name).
"""
    # Delegate to tempfile.mkstemp, which creates the file atomically
    # with mode 0600 and returns an already-open descriptor.
    return tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=directory)
1372 ################################################################################
# checks if the user part of the email is listed in the alias file
def is_email_alias(email):
    """Return True if the local part of 'email' is listed in the alias file.

    The alias file is parsed once and memoised in the module-level
    alias_cache; subsequent calls are a plain set lookup.
    """
    global alias_cache
    if alias_cache is None:
        aliasfn = which_alias_file()
        alias_cache = set()
        if aliasfn:
            # Alias file format is "name: target"; we only need the name.
            for l in open(aliasfn):
                alias_cache.add(l.split(':')[0])
    uid = email.split('@')[0]
    return uid in alias_cache
1387 ################################################################################
# Initialise the global apt configuration: apt_pkg must be initialised
# before its configuration machinery is used.
apt_pkg.init()

Cnf = apt_pkg.newConfiguration()
apt_pkg.ReadConfigFileISC(Cnf, default_config)

# Overlay the site-specific configuration only when it differs from the
# default; call which_conf_file() once instead of twice.
conf_file = which_conf_file()
if conf_file != default_config:
    apt_pkg.ReadConfigFileISC(Cnf, conf_file)
1397 ################################################################################