2 # vim:set et ts=4 sw=4:
5 # Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
7 ################################################################################
9 # This program is free software; you can redistribute it and/or modify
10 # it under the terms of the GNU General Public License as published by
11 # the Free Software Foundation; either version 2 of the License, or
12 # (at your option) any later version.
14 # This program is distributed in the hope that it will be useful,
15 # but WITHOUT ANY WARRANTY; without even the implied warranty of
16 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 # GNU General Public License for more details.
19 # You should have received a copy of the GNU General Public License
20 # along with this program; if not, write to the Free Software
21 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
23 ################################################################################
25 import codecs, commands, email.Header, os, pwd, re, select, socket, shutil, \
26 sys, tempfile, traceback, stat
30 from dak_exceptions import *
32 ################################################################################
# Compiled regular expressions used throughout this module.
re_comments = re.compile(r"\#.*")                      # strip '#' comments
re_no_epoch = re.compile(r"^\d+\:")                    # leading version epoch
re_no_revision = re.compile(r"-[^-]+$")                # trailing Debian revision
re_arch_from_filename = re.compile(r"/binary-[^/]+/")
re_extract_src_version = re.compile (r"(\S+)\s*\((.*)\)")
re_isadeb = re.compile (r"(.+?)_(.+?)_(.+)\.u?deb$")
re_issource = re.compile (r"(.+)_(.+?)\.(orig\.tar\.gz|diff\.gz|tar\.gz|dsc)$")

re_single_line_field = re.compile(r"^(\S*)\s*:\s*(.*)")
re_multi_line_field = re.compile(r"^\s(.*)")
re_taint_free = re.compile(r"^[-+~/\.\w]+$")

re_parse_maintainer = re.compile(r"^\s*(\S.*\S)\s*\<([^\>]+)\>")
re_gpg_uid = re.compile('^uid.*<([^>]*)>')

# "source (version)" as used in the Source: field of a .changes.
# (This pattern was previously defined twice; the duplicate is removed.)
re_srchasver = re.compile(r"^(\S+)\s+\((\S+)\)$")
re_verwithext = re.compile(r"^(\d+)(?:\.(\d+))(?:\s+\((\S+)\))?$")

# FIX: the escaping table previously mapped every character to itself,
# which made HTML escaping a no-op; map to the proper HTML entities.
html_escaping = {'"':'&quot;', '&':'&amp;', '<':'&lt;', '>':'&gt;'}
re_html_escaping = re.compile('|'.join(map(re.escape, html_escaping.keys())))
# Fallback configuration paths used by which_conf_file() and
# which_apt_conf_file() when no host-specific override is configured.
default_config = "/etc/dak/dak.conf"
default_apt_config = "/etc/dak/apt.conf"

# Cache of key fingerprint -> uid/email lookups, filled lazily.
key_uid_email_cache = {}

# (hashname, function, earliest_changes_version)
# NOTE(review): the apt_pkg import is not visible in this view; it is
# presumably imported at the top of the file — confirm.
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
                ("sha256", apt_pkg.sha256sum, (1, 8))]
67 ################################################################################
70 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
72 ################################################################################
def open_file(filename, mode='r'):
    """Open 'filename' in 'mode' and return the file object.

    Raises CantOpenError (carrying the filename) if the file cannot
    be opened.  FIX: restores the try/except wrapper around open()
    (the visible block raised unconditionally) and uses the
    call-style raise so the code is also valid Python 3.
    """
    try:
        f = open(filename, mode)
    except IOError:
        raise CantOpenError(filename)
    return f
81 ################################################################################
def our_raw_input(prompt=""):
    # Prompt the user on stdout and read a line; on end-of-file (^D) a
    # notice is written to stderr.  NOTE(review): several lines are
    # elided from this view (the actual read and its EOF handler).
    sys.stdout.write(prompt)
        sys.stderr.write("\nUser interrupt (^D).\n")
94 ################################################################################
def extract_component_from_section(section):
    # Split a "component/section" value into (section, component);
    # plain sections fall into the default component.
    # NOTE(review): some lines are elided from this view (defaults and
    # the body of the Cnf expansion branch).
    if section.find('/') != -1:
        component = section.split('/')[0]

    # Expand default component
    if Cnf.has_key("Component::%s" % section):

    return (section, component)
111 ################################################################################
def parse_deb822(contents, signing_rules=0):
    """Parse deb822-style control data (.changes/.dsc) into a field dict.

    signing_rules: -1 = no signature required, 0 = signature required,
    1 = strict .dsc-style format checking.
    NOTE(review): many lines are elided from this view; indentation of
    orphaned lines below is reconstructed and should be verified.
    """
    # Split the lines in the input, keeping the linebreaks.
    lines = contents.splitlines(True)
        raise ParseChangesError, "[Empty changes file]"

    # Reindex by line number so we can easily verify the format of
        indexed_lines[index] = line[:-1]

    num_of_lines = len(indexed_lines.keys())
    while index < num_of_lines:
        line = indexed_lines[index]
            # Blank line: in strict mode the signature must follow.
            if signing_rules == 1:
                if index > num_of_lines:
                    raise InvalidDscError, index
                line = indexed_lines[index]
                if not line.startswith("-----BEGIN PGP SIGNATURE"):
                    raise InvalidDscError, index
        # A detached signature block terminates the data section.
        if line.startswith("-----BEGIN PGP SIGNATURE"):
        if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
            if signing_rules == 1:
                # Strict mode: skip the PGP header up to the blank line.
                while index < num_of_lines and line != "":
                    line = indexed_lines[index]
        # If we're not inside the signed data, don't process anything
        if signing_rules >= 0 and not inside_signature:
        # Single-line "Field: value" entries.
        slf = re_single_line_field.match(line)
            field = slf.groups()[0].lower()
            changes[field] = slf.groups()[1]
            # ' .' continuation marker: preserve the blank line.
            changes[field] += '\n'
        # Continuation lines of a multi-line field.
        mlf = re_multi_line_field.match(line)
            raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
            if first == 1 and changes[field] != "":
                changes[field] += '\n'
            changes[field] += mlf.groups()[0] + '\n'

    if signing_rules == 1 and inside_signature:
        raise InvalidDscError, index

    changes["filecontents"] = "".join(lines)

    if changes.has_key("source"):
        # Strip the source version in brackets from the source field,
        # put it in the "source-version" field instead.
        srcver = re_srchasver.search(changes["source"])
            changes["source"] = srcver.group(1)
            changes["source-version"] = srcver.group(2)

        raise ParseChangesError, error
201 ################################################################################
def parse_changes(filename, signing_rules=0):
    """Parses a changes file and returns a dictionary where each field is a
    key. The mandatory first argument is the filename of the .changes
    file.

    signing_rules is an optional argument:

    o If signing_rules == -1, no signature is required.
    o If signing_rules == 0 (the default), a signature is required.
    o If signing_rules == 1, it turns on the same strict format checking
      as for .dsc files.

    The rules for (signing_rules == 1)-mode are:

    o The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
      followed by any PGP header data and must end with a blank line.

    o The data section must end with a blank line and must be followed by
      "-----BEGIN PGP SIGNATURE-----".
    """
    changes_in = open_file(filename)
    content = changes_in.read()
    # NOTE(review): the close of changes_in is elided from this view.
    return parse_deb822(content, signing_rules)
229 ################################################################################
def hash_key(hashname):
    """Return the files-dict key under which the named hash is stored,
    e.g. 'md5' -> 'md5sum'."""
    return hashname + 'sum'
234 ################################################################################
def create_hash(where, files, hashname, hashfunc):
    """create_hash extends the passed files dict with the given hash by
    iterating over all files on disk and passing them to the hashing
    function given.  Returns a list of reject messages.
    NOTE(review): the try: introducing the except below is elided from
    this view.
    """
    for f in files.keys():
        file_handle = open_file(f)
        except CantOpenError:
            rejmsg.append("Could not open file %s for checksumming" % (f))
        # Store the computed digest under e.g. 'sha1sum'.
        files[f][hash_key(hashname)] = hashfunc(file_handle)
253 ################################################################################
def check_hash(where, files, hashname, hashfunc):
    """check_hash checks the given hash in the files dict against the actual
    files on disk. The hash values need to be present consistently in
    all file entries. It does not modify its input in any way.
    NOTE(review): several lines (try:, message continuations, cleanup)
    are elided from this view.
    """
    for f in files.keys():
        file_handle = open_file(f)
        # Check for the hash entry, to not trigger a KeyError.
        if not files[f].has_key(hash_key(hashname)):
            rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
        # Actually check the hash for correctness.
        if hashfunc(file_handle) != files[f][hash_key(hashname)]:
            rejmsg.append("%s: %s check failed in %s" % (f, hashname,
        except CantOpenError:
            # TODO: This happens when the file is in the pool.
            # warn("Cannot open file %s" % f)
286 ################################################################################
def check_size(where, files):
    """check_size checks the file sizes in the passed files dict against the
    actual sizes on disk; returns a list of reject messages.
    NOTE(review): the os.stat() call that produces 'entry' is elided
    from this view.
    """
    for f in files.keys():
        # TODO: This happens when the file is in the pool.
        actual_size = entry[stat.ST_SIZE]
        size = int(files[f]["size"])
        if size != actual_size:
            rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
                          % (f, actual_size, size, where))
309 ################################################################################
def check_hash_fields(what, manifest):
    """check_hash_fields ensures that there are no checksum fields in the
    given dict that we do not know about.  Returns reject messages.
    (rejmsg initialisation and the return are elided from this view.)"""
    # Names of the hashes we support (from known_hashes).
    hashes = map(lambda x: x[0], known_hashes)
    for field in manifest:
        if field.startswith("checksums-"):
            hashname = field.split("-",1)[1]
            if hashname not in hashes:
                rejmsg.append("Unsupported checksum field for %s "\
                    "in %s" % (hashname, what))
325 ################################################################################
def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
    # Verify (or, for old changes formats, compute) the given hash for
    # a .changes.  NOTE(review): 'func' is bound in lines elided from
    # this view (apparently selecting between check and create).
    if format >= version:
        # The version should contain the specified hash.
        # Import hashes from the changes
        rejmsg = parse_checksums(".changes", files, changes, hashname)
        # We need to calculate the hash because it can't possibly
        # be in the file.
        return func(".changes", files, hashname, hashfunc)

# We could add the orig which might be in the pool to the files dict to
# access the checksums easily.
def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
    """ensure_dsc_hashes' task is to ensure that each and every *present* hash
    in the dsc is correct, i.e. identical to the changes file and if necessary
    the pool. The latter task is delegated to check_hash.
    (Some lines are elided from this view.)"""
    if not dsc.has_key('Checksums-%s' % (hashname,)):
        # Checksum field absent: nothing to verify for this hash.
    # Import hashes from the dsc
    parse_checksums(".dsc", dsc_files, dsc, hashname)
    rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
359 ################################################################################
def ensure_hashes(changes, dsc, files, dsc_files):
    # Cross-check every known hash between .changes and .dsc and
    # return the accumulated reject messages.
    # NOTE(review): lines are elided from this view; the two 'format ='
    # assignments below appear to be a try/except fallback pair.

    # Make sure we recognise the format of the Files: field in the .changes
    format = changes.get("format", "0.0").split(".", 1)
    format = int(format[0]), int(format[1])
    format = int(float(format[0])), 0

    # We need to deal with the original changes blob, as the fields we need
    # might not be in the changes dict serialised into the .dak anymore.
    orig_changes = parse_deb822(changes['filecontents'])

    # Copy the checksums over to the current changes dict. This will keep
    # the existing modifications to it intact.
    for field in orig_changes:
        if field.startswith('checksums-'):
            changes[field] = orig_changes[field]

    # Check for unsupported hashes
    rejmsg.extend(check_hash_fields(".changes", changes))
    rejmsg.extend(check_hash_fields(".dsc", dsc))

    # We have to calculate the hash if we have an earlier changes version than
    # the hash appears in rather than require it exist in the changes file
    for hashname, hashfunc, version in known_hashes:
        rejmsg.extend(_ensure_changes_hash(changes, format, version, files,
        if "source" in changes["architecture"]:
            rejmsg.extend(_ensure_dsc_hash(dsc, dsc_files, hashname,
def parse_checksums(where, files, manifest, hashname):
    # Read the 'checksums-<hash>' field of the manifest into the files
    # dict, cross-checking sizes; returns reject messages.
    field = 'checksums-%s' % hashname
    if not field in manifest:
    for line in manifest[field].split('\n'):
        checksum, size, checkfile = line.strip().split(' ')
        if not files.has_key(checkfile):
            # TODO: check for the file's entry in the original files dict, not
            # the one modified by (auto)byhand and other weird stuff
            # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
            #               (file, hashname, where))
        # Note: sizes are compared as strings here.
        if not files[checkfile]["size"] == size:
            rejmsg.append("%s: size differs for files and checksums-%s entry "\
                "in %s" % (checkfile, hashname, where))
        files[checkfile][hash_key(hashname)] = checksum
    for f in files.keys():
        if not files[f].has_key(hash_key(hashname)):
            # NOTE(review): 'checkfile' below is the leftover from the
            # previous loop; 'f' was probably intended — verify.
            rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
422 ################################################################################
# Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl

def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
    # Parse the Files:/Checksums-* field of a changes or dsc dict into
    # a per-filename dict.  NOTE(review): many lines are elided from
    # this view (format guards, entry splitting, section defaulting).

    # Make sure we have a Files: field to parse...
    if not changes.has_key(field):
        raise NoFilesFieldError

    # Make sure we recognise the format of the Files: field
    format = re_verwithext.search(changes.get("format", "0.0"))
        raise UnknownFormatError, "%s" % (changes.get("format","0.0"))

    format = format.groups()
    if format[1] == None:
        format = int(float(format[0])), 0, format[2]
        format = int(format[0]), int(format[1]), format[2]
    if format[2] == None:

    # format = (1,0) are the only formats we currently accept,
    # format = (0,0) are missing format headers of which we still
    # have some in the archive.
    if format != (1,0) and format != (0,0):
        raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
    if (format < (1,5) or format > (1,8)):
        raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
    if field != "files" and format < (1,8):
        raise UnknownFormatError, "%s" % (changes.get("format","0.0"))

    # Only .changes Files: entries carry section/priority columns.
    includes_section = (not is_a_dsc) and field == "files"

    # Parse each entry/line:
    for i in changes[field].split('\n'):
        section = priority = ""
        (md5, size, section, priority, name) = s
        (md5, size, name) = s
        raise ParseChangesError, i

        (section, component) = extract_component_from_section(section)

        files[name] = Dict(size=size, section=section,
                           priority=priority, component=component)
        files[name][hashname] = md5
487 ################################################################################
def force_to_utf8(s):
    """Forces a string to UTF-8. If the string isn't already UTF-8,
    it's assumed to be ISO-8859-1.
    (The try-UTF-8-first branch is elided from this view.)"""
    latin1_s = unicode(s,'iso8859-1')
    return latin1_s.encode('utf-8')
def rfc2047_encode(s):
    """Encodes a (header) string per RFC2047 if necessary. If the
    string is neither ASCII nor UTF-8, it's assumed to be ISO-8859-1.
    NOTE(review): the try/except fallbacks around each codec attempt
    are elided from this view.
    """
    # Try plain ASCII first (needs no encoding at all)...
    codecs.lookup('ascii')[1](s)
    # ...then UTF-8...
    codecs.lookup('utf-8')[1](s)
    h = email.Header.Header(s, 'utf-8', 998)
    # ...finally fall back to ISO-8859-1.
    h = email.Header.Header(s, 'iso-8859-1', 998)
515 ################################################################################
517 # <Culus> 'The standard sucks, but my tool is supposed to interoperate
518 # with it. I know - I'll fix the suckage and make things
def fix_maintainer (maintainer):
    """Parses a Maintainer or Changed-By field and returns:
    (1) an RFC822 compatible version,
    (2) an RFC2047 compatible version,
    (3) the name, and (4) the email address.

    The name is forced to UTF-8 for both (1) and (3). If the name field
    contains '.' or ',' (as allowed by Debian policy), (1) and (2) are
    switched to 'email (name)' format.
    NOTE(review): several lines are elided from this view.
    """
    maintainer = maintainer.strip()
        return ('', '', '', '')

    if maintainer.find("<") == -1:
        # No angle brackets: treat the whole field as the address.
    elif (maintainer[0] == "<" and maintainer[-1:] == ">"):
        email = maintainer[1:-1]
        # Full "Name <email>" form.
        m = re_parse_maintainer.match(maintainer)
        raise ParseMaintError, "Doesn't parse as a valid Maintainer field."

    # Get an RFC2047 compliant version of the name
    rfc2047_name = rfc2047_encode(name)

    # Force the name to be UTF-8
    name = force_to_utf8(name)

    if name.find(',') != -1 or name.find('.') != -1:
        rfc822_maint = "%s (%s)" % (email, name)
        rfc2047_maint = "%s (%s)" % (email, rfc2047_name)
        rfc822_maint = "%s <%s>" % (name, email)
        rfc2047_maint = "%s <%s>" % (rfc2047_name, email)

    if email.find("@") == -1 and email.find("buildd_") != 0:
        raise ParseMaintError, "No @ found in email address part."

    return (rfc822_maint, rfc2047_maint, name, email)
566 ################################################################################
# sendmail wrapper, takes _either_ a message string or a file as arguments
def send_mail (message, filename=""):
    # NOTE(review): lines are elided from this view.
    # If we've been passed a string dump it into a temporary file
        # NOTE(review): tempfile.mktemp() is race-prone; the O_EXCL in
        # os.open below mitigates it, but mkstemp would be safer.
        filename = tempfile.mktemp()
        fd = os.open(filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0700)
        os.write (fd, message)

    # Pipe the (possibly temporary) file into the configured sendmail.
    (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
        raise SendmailFailedError, output

    # Clean up any temporary files
586 ################################################################################
def poolify (source, component):
    # Return the pool subdirectory for a source package: lib* packages
    # pool under their first four letters, everything else under the
    # first letter.  NOTE(review): elided lines appear to normalise
    # 'component' (e.g. append a trailing '/') — confirm.
    if source[:3] == "lib":
        return component + source[:4] + '/' + source + '/'
    return component + source[:1] + '/' + source + '/'
596 ################################################################################
def move (src, dest, overwrite = 0, perms = 0664):
    # Move src to dest (copy2 + chmod; the final unlink of src is
    # elided from this view), creating the destination directory with
    # setgid (02775) under a cleared umask if needed.
    if os.path.exists(dest) and os.path.isdir(dest):
        # (branch body elided; the dirname below is the other branch)
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Moving %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
            fubar("Can't move %s to %s - file already exists." % (src, dest))
            if not os.access(dest, os.W_OK):
                fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
def copy (src, dest, overwrite = 0, perms = 0664):
    # Same as move() above, but raises exceptions instead of calling
    # fubar() and keeps the source file.  (Lines elided from this view.)
    if os.path.exists(dest) and os.path.isdir(dest):
        # (branch body elided; the dirname below is the other branch)
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Copying %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
            raise FileExistsError
            if not os.access(dest, os.W_OK):
                raise CantOverwriteError
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
643 ################################################################################
646 res = socket.gethostbyaddr(socket.gethostname())
647 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
648 if database_hostname:
649 return database_hostname
def which_conf_file ():
    """Return the host-specific dak configuration path if one is
    configured for this host, otherwise the compiled-in default."""
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    key = "Config::" + hostname + "::DakConfig"
    if Cnf.get(key):
        return Cnf[key]
    return default_config
def which_apt_conf_file ():
    """Return the host-specific apt configuration path if one is
    configured for this host, otherwise the compiled-in default."""
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    key = "Config::" + hostname + "::AptConfig"
    if Cnf.get(key):
        return Cnf[key]
    return default_apt_config
def which_alias_file():
    # Return this host's forward-alias file if it exists (the return
    # statements themselves are elided from this view).
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
    if os.path.exists(aliasfn):
675 ################################################################################
677 # Escape characters which have meaning to SQL's regex comparison operator ('~')
678 # (woefully incomplete)
681 s = s.replace('+', '\\\\+')
682 s = s.replace('.', '\\\\.')
685 ################################################################################
# Perform a substition of template
def TemplateSubst(map, filename):
    # Read the template file and replace each key of 'map' with its
    # value.  NOTE(review): the loop binding 'x' and the return are
    # elided from this view.
    file = open_file(filename)
    template = file.read()
        template = template.replace(x,map[x])
696 ################################################################################
def fubar(msg, exit_code=1):
    # Fatal error: print to stderr; the sys.exit(exit_code) call is
    # elided from this view.
    sys.stderr.write("E: %s\n" % (msg))

    # NOTE(review): the line below belongs to warn(), whose def line
    # is elided from this view.
    sys.stderr.write("W: %s\n" % (msg))
705 ################################################################################
707 # Returns the user name with a laughable attempt at rfc822 conformancy
708 # (read: removing stray periods).
710 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
712 ################################################################################
722 return ("%d%s" % (c, t))
724 ################################################################################
def cc_fix_changes (changes):
    # Canonicalise the architecture field into a dict of arch -> 1.
    # NOTE(review): the split loop binding 'j' is elided from this view.
    o = changes.get("architecture", "")
        del changes["architecture"]
    changes["architecture"] = {}
        changes["architecture"][j] = 1
# Sort by source name, source version, 'have source', and then by filename
def changes_compare (a, b):
    # cmp()-style comparator over .changes filenames.  NOTE(review):
    # the try/except around each parse and the early returns on each
    # non-zero comparison are elided from this view.
    a_changes = parse_changes(a)
    b_changes = parse_changes(b)

    cc_fix_changes (a_changes)
    cc_fix_changes (b_changes)

    # Sort by source name
    a_source = a_changes.get("source")
    b_source = b_changes.get("source")
    q = cmp (a_source, b_source)

    # Sort by source version
    a_version = a_changes.get("version", "0")
    b_version = b_changes.get("version", "0")
    q = apt_pkg.VersionCompare(a_version, b_version)

    # Sort by 'have source'
    a_has_source = a_changes["architecture"].get("source")
    b_has_source = b_changes["architecture"].get("source")
    if a_has_source and not b_has_source:
    elif b_has_source and not a_has_source:

    # Fall back to sort by filename
774 ################################################################################
def find_next_free (dest, too_many=100):
    # Append '.<n>' to dest until a free filename is found; raises
    # NoFreeFilenameError after too_many attempts.  ('extra' and
    # 'orig_dest' are initialised in lines elided from this view.)
    while os.path.exists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
    if extra >= too_many:
        raise NoFreeFilenameError
786 ################################################################################
def result_join (original, sep = '\t'):
    # Join a result row with sep, mapping None entries to empty
    # strings.  (The accumulator 'list' is initialised, and the None
    # branch filled, in lines elided from this view.)
    for i in xrange(len(original)):
        if original[i] == None:
            list.append(original[i])
    return sep.join(list)
797 ################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    # Prefix every line of 'str' with 'prefix', skipping blank lines
    # unless include_blank_lines is set.  ('out' initialisation and the
    # return are elided from this view.)
    for line in str.split('\n'):
        if line or include_blank_lines:
            out += "%s%s\n" % (prefix, line)
    # Strip trailing new line
810 ################################################################################
def validate_changes_file_arg(filename, require_changes=1):
    """'filename' is either a .changes or .dak file. If 'filename' is a
    .dak file, it's changed to be the corresponding .changes file. The
    function then checks if the .changes file a) exists and b) is
    readable and returns the .changes filename if so. If there's a
    problem, the next action depends on the option 'require_changes'
    argument:

    o If 'require_changes' == -1, errors are ignored and the .changes
      filename is returned.
    o If 'require_changes' == 0, a warning is given and 'None' is returned.
    o If 'require_changes' == 1, a fatal error is raised.

    (Some lines are elided from this view.)
    """
    orig_filename = filename
    if filename.endswith(".dak"):
        filename = filename[:-4]+".changes"

    if not filename.endswith(".changes"):
        error = "invalid file type; not a changes file"
    if not os.access(filename,os.R_OK):
        if os.path.exists(filename):
            error = "permission denied"
            error = "file not found"
        if require_changes == 1:
            fubar("%s: %s." % (orig_filename, error))
        elif require_changes == 0:
            warn("Skipping %s - %s" % (orig_filename, error))
        else: # We only care about the .dak file
851 ################################################################################
854 return (arch != "source" and arch != "all")
856 ################################################################################
def join_with_commas_and(list):
    """Render a list as English prose: "nothing", "a", "a and b",
    "a, b and c"."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    return "%s and %s" % (", ".join(list[:-1]), list[-1])
863 ################################################################################
868 (pkg, version, constraint) = atom
870 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
873 pp_deps.append(pp_dep)
874 return " |".join(pp_deps)
876 ################################################################################
881 ################################################################################
883 # Handle -a, -c and -s arguments; returns them as SQL constraints
884 def parse_args(Options):
888 for suite in split_args(Options["Suite"]):
889 suite_id = database.get_suite_id(suite)
891 warn("suite '%s' not recognised." % (suite))
893 suite_ids_list.append(suite_id)
895 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
897 fubar("No valid suite given.")
902 if Options["Component"]:
903 component_ids_list = []
904 for component in split_args(Options["Component"]):
905 component_id = database.get_component_id(component)
906 if component_id == -1:
907 warn("component '%s' not recognised." % (component))
909 component_ids_list.append(component_id)
910 if component_ids_list:
911 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
913 fubar("No valid component given.")
917 # Process architecture
918 con_architectures = ""
919 if Options["Architecture"]:
922 for architecture in split_args(Options["Architecture"]):
923 if architecture == "source":
926 architecture_id = database.get_architecture_id(architecture)
927 if architecture_id == -1:
928 warn("architecture '%s' not recognised." % (architecture))
930 arch_ids_list.append(architecture_id)
932 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
935 fubar("No valid architecture given.")
939 return (con_suites, con_architectures, con_components, check_source)
941 ################################################################################
943 # Inspired(tm) by Bryn Keller's print_exc_plus (See
944 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
947 tb = sys.exc_info()[2]
956 traceback.print_exc()
958 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
959 frame.f_code.co_filename,
961 for key, value in frame.f_locals.items():
962 print "\t%20s = " % key,
966 print "<unable to print>"
968 ################################################################################
970 def try_with_debug(function):
978 ################################################################################
# Function for use in sorting lists of architectures.
# Sorts normally except that 'source' dominates all others.

def arch_compare_sw (a, b):
    # cmp()-style comparator; the branches for the remaining cases are
    # elided from this view.
    if a == "source" and b == "source":
993 ################################################################################
995 # Split command line arguments which can be separated by either commas
996 # or whitespace. If dwim is set, it will complain about string ending
997 # in comma since this usually means someone did 'dak ls -a i386, m68k
998 # foo' or something and the inevitable confusion resulting from 'm68k'
999 # being treated as an argument is undesirable.
def split_args (s, dwim=1):
    # Split on commas if any are present, otherwise on whitespace;
    # with dwim set, a trailing comma is treated as a probable typo.
    # (The actual split/return lines are elided from this view.)
    if s.find(",") == -1:
        if s[-1:] == "," and dwim:
            fubar("split_args: found trailing comma, spurious space maybe?")
1009 ################################################################################
def Dict(**kwargs):
    """Return the keyword arguments as a plain dictionary.

    The catch-all parameter is named 'kwargs' rather than 'dict' so it
    no longer shadows the builtin; callers are unaffected since the
    name is only visible inside the function.
    """
    return kwargs
1013 ########################################
# Our very own version of commands.getouputstatus(), hacked to support
# gpgv's status fd.
def gpgv_get_status_output(cmd, status_read, status_write):
    # Fork/exec 'cmd' under /bin/sh, capturing stdout, stderr and the
    # gpgv status fd; returns (output, status, exit_status).
    # NOTE(review): many lines (fork, fd bookkeeping, read loop
    # plumbing) are elided from this view.
    cmd = ['/bin/sh', '-c', cmd]
    p2cread, p2cwrite = os.pipe()
    c2pread, c2pwrite = os.pipe()
    errout, errin = os.pipe()
    # Child: close every fd except the status pipe, then exec.
        for i in range(3, 256):
            if i != status_write:
        os.execvp(cmd[0], cmd)
    # Parent: set up the read ends of the pipes.
    os.dup2(c2pread, c2pwrite)
    os.dup2(errout, errin)

    output = status = ""
        i, o, e = select.select([c2pwrite, errin, status_read], [], [])
            r = os.read(fd, 8196)
                more_data.append(fd)
                if fd == c2pwrite or fd == errin:
                elif fd == status_read:
                    fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))

    pid, exit_status = os.waitpid(pid, 0)

    os.close(status_write)
    os.close(status_read)
    return output, status, exit_status
1077 ################################################################################
def process_gpgv_output(status):
    # Process the status-fd output
    # Parse gpgv --status-fd output into a keyword -> args dict plus an
    # internal_error string describing malformed lines.  (Loop guards
    # and 'args' binding are elided from this view.)
    for line in status.split('\n'):
        split = line.split()
            internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
        # Duplicate tokens are an error, except for a few benign ones.
        if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
        keywords[keyword] = args

    return (keywords, internal_error)
1104 ################################################################################
def retrieve_key (filename, keyserver=None, keyring=None):
    """Retrieve the key that signed 'filename' from 'keyserver' and
    add it to 'keyring'. Returns nothing on success, or an error message
    on failure.  (Guard lines around the defaults and the early return
    below are elided from this view.)"""
    # Defaults for keyserver and keyring
        keyserver = Cnf["Dinstall::KeyServer"]
        keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(filename):
        return "%s: tainted filename" % (filename)

    # Invoke gpgv on the file
    status_read, status_write = os.pipe();
    cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
    (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)
        return internal_error

    if not keywords.has_key("NO_PUBKEY"):
        return "didn't find expected NO_PUBKEY in gpgv status-fd output"

    fingerprint = keywords["NO_PUBKEY"][0]
    # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
    # it'll try to create a lockfile in /dev. A better solution might
    # be a tempfile or something.
    cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
          % (Cnf["Dinstall::SigningKeyring"])
    cmd += " --keyring %s --keyserver %s --recv-key %s" \
           % (keyring, keyserver, fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
        return "'%s' failed with exit code %s" % (cmd, result)
1148 ################################################################################
def gpg_keyring_args(keyrings=None):
    # Render the keyrings (defaulting, in an elided guard, to the
    # configured Dinstall::GPGKeyring list) as gpg --keyring arguments.
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    return " ".join(["--keyring %s" % x for x in keyrings])
1156 ################################################################################
def check_signature (sig_filename, reject, data_filename="", keyrings=None, autofetch=None):
    """Check the signature of a file and return the fingerprint if the
signature is valid or 'None' if it's not.  The first argument is the
filename whose signature should be checked.  The second argument is a
reject function and is called when an error is found.  The reject()
function must allow for two arguments: the first is the error message,
the second is an optional prefix string.  It's possible for reject()
to be called more than once during an invocation of check_signature().
The third argument is optional and is the name of the files the
detached signature applies to.  The fourth argument is optional and is
a *list* of keyrings to use.  'autofetch' can either be None, True or
False.  If None, the default behaviour specified in the config will be
used."""

    # Ensure the filename contains no shell meta-characters or other badness;
    # the filename ends up on a gpgv command line below.
    if not re_taint_free.match(sig_filename):
        reject("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
        return None

    if data_filename and not re_taint_free.match(data_filename):
        reject("!!WARNING!! tainted data filename: '%s'." % (data_filename))
        return None

    if not keyrings:
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    # Autofetch the signing key if that's enabled
    if autofetch == None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    if autofetch:
        error_msg = retrieve_key(sig_filename)
        if error_msg:
            reject(error_msg)
            return None

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    if internal_error:
        reject("internal error while performing signature check on %s." % (sig_filename))
        reject(internal_error, "")
        reject("Please report the above errors to the Archive maintainers by replying to this mail.", "")
        return None

    bad = ""
    # Now check for obviously bad things in the processed output
    if keywords.has_key("KEYREVOKED"):
        reject("The key used to sign %s has been revoked." % (sig_filename))
        bad = 1
    if keywords.has_key("BADSIG"):
        reject("bad signature on %s." % (sig_filename))
        bad = 1
    if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
        reject("failed to check signature on %s." % (sig_filename))
        bad = 1
    if keywords.has_key("NO_PUBKEY"):
        args = keywords["NO_PUBKEY"]
        key = "UNKNOWN"
        if len(args) >= 1:
            key = args[0]
        reject("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
        bad = 1
    if keywords.has_key("BADARMOR"):
        reject("ASCII armour of signature was corrupt in %s." % (sig_filename))
        bad = 1
    if keywords.has_key("NODATA"):
        reject("no signature found in %s." % (sig_filename))
        bad = 1
    if keywords.has_key("EXPKEYSIG"):
        args = keywords["EXPKEYSIG"]
        key = "UNKNOWN"
        if len(args) >= 1:
            key = args[0]
        reject("Signature made by expired key 0x%s" % (key))
        bad = 1
    if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
        args = keywords["KEYEXPIRED"]
        expiredate = ""
        if len(args) >= 1:
            timestamp = args[0]
            # gpgv emits either epoch seconds or an ISO8601 date (contains 'T')
            if timestamp.count("T") == 0:
                expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
            else:
                expiredate = timestamp
        reject("The key used to sign %s has expired on %s" % (sig_filename, expiredate))
        bad = 1

    if bad:
        return None

    # Next check gpgv exited with a zero return code
    if exit_status:
        reject("gpgv failed while checking %s." % (sig_filename))
        if status.strip():
            reject(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
        else:
            reject(prefix_multi_line_string(output, " [GPG output:] "), "")
        return None

    # Sanity check the good stuff we expect
    if not keywords.has_key("VALIDSIG"):
        reject("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
        bad = 1
    else:
        args = keywords["VALIDSIG"]
        if len(args) < 1:
            reject("internal error while checking signature on %s." % (sig_filename))
            bad = 1
        else:
            fingerprint = args[0]
    if not keywords.has_key("GOODSIG"):
        reject("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
        bad = 1
    if not keywords.has_key("SIG_ID"):
        reject("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))
        bad = 1

    # Finally ensure there's not something we don't recognise
    known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
                          NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")

    for keyword in keywords.keys():
        if not known_keywords.has_key(keyword):
            reject("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
            bad = 1

    if bad:
        return None
    else:
        return fingerprint
1297 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """Retrieve email addresses from gpg key uids for a given fingerprint.

    Results are memoized in the module-level key_uid_email_cache; a
    failed gpg invocation caches an empty set for the fingerprint."""
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses != None:
        # Cache hit: return immediately instead of re-running gpg.
        return addresses
    addresses = set()
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
          % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    if result == 0:
        for l in output.split('\n'):
            m = re_gpg_uid.match(l)
            if m:
                addresses.add(m.group(1))
    key_uid_email_cache[fingerprint] = addresses
    return addresses
1316 ################################################################################
1318 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """Greedily word-wrap 'paragraph' to at most 'max_length' characters
    per line.  Continuation lines are indented with 'prefix'.  A word
    longer than 'max_length' is emitted on a line of its own rather
    than being broken.  Returns the wrapped string."""
    s = ""
    line = ""
    words = paragraph.split()
    for word in words:
        word_size = len(word)
        if word_size > max_length:
            # Flush any pending line first, then give the oversized
            # word its own line.  'line' must be cleared here or the
            # next short word would be appended to stale text.
            if line:
                s += line + '\n' + prefix
                line = ""
            s += word + '\n' + prefix
        elif line:
            new_length = len(line) + word_size + 1
            if new_length > max_length:
                s += line + '\n' + prefix
                line = word
            else:
                line += ' ' + word
        else:
            line = word
    if line:
        s += line
    return s
1349 ################################################################################
1351 # Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
1352 # Returns fixed 'src'
1353 def clean_symlink (src, dest, root):
1354 src = src.replace(root, '', 1)
1355 dest = dest.replace(root, '', 1)
1356 dest = os.path.dirname(dest)
1357 new_src = '../' * len(dest.split('/'))
1358 return new_src + src
1360 ################################################################################
def temp_filename(directory=None, dotprefix=None, perms=0o700):
    """Return a secure and unique filename by pre-creating it.
    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'dotprefix' is non-null, the filename will be prefixed with a '.'.
    'perms' are the permissions the file is created with (default 0o700)."""
    old_tempdir = tempfile.tempdir
    if directory != None:
        # tempfile.tempdir is module-global state; remember it so it can
        # be restored below.
        tempfile.tempdir = directory
    try:
        filename = tempfile.mktemp()
        if dotprefix:
            filename = "%s/.%s" % (os.path.dirname(filename), os.path.basename(filename))
        # Pre-create with O_EXCL so no other process can race us to the
        # name mktemp() handed out.
        fd = os.open(filename, os.O_RDWR | os.O_CREAT | os.O_EXCL, perms)
        os.close(fd)
    finally:
        # Restore the global tempdir even if os.open() raised.
        tempfile.tempdir = old_tempdir
    return filename
1383 ################################################################################
def is_email_alias(email):
    """Checks if the user part of the email is listed in the alias file.

    The alias file is parsed once and memoized in the module-level
    'alias_cache' set (first field of each ':'-separated line)."""
    global alias_cache
    if alias_cache == None:
        aliasfn = which_alias_file()
        alias_cache = set()
        if aliasfn:
            # Close the file handle explicitly rather than leaking it.
            f = open(aliasfn)
            try:
                for l in f:
                    alias_cache.add(l.split(':')[0])
            finally:
                f.close()
    uid = email.split('@')[0]
    return uid in alias_cache
1398 ################################################################################
# Module initialisation: build the global apt_pkg configuration object
# 'Cnf' at import time.  The built-in defaults are read first, then the
# site configuration is layered on top if it differs from the default.
Cnf = apt_pkg.newConfiguration()
apt_pkg.ReadConfigFileISC(Cnf,default_config)

# Only re-read when which_conf_file() points somewhere other than the
# defaults, to avoid parsing the same file twice.
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1408 ################################################################################