2 # vim:set et ts=4 sw=4:
5 # Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
7 ################################################################################
9 # This program is free software; you can redistribute it and/or modify
10 # it under the terms of the GNU General Public License as published by
11 # the Free Software Foundation; either version 2 of the License, or
12 # (at your option) any later version.
14 # This program is distributed in the hope that it will be useful,
15 # but WITHOUT ANY WARRANTY; without even the implied warranty of
16 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 # GNU General Public License for more details.
19 # You should have received a copy of the GNU General Public License
20 # along with this program; if not, write to the Free Software
21 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
23 ################################################################################
import codecs, commands, email.Header, os, pwd, re, select, socket, shutil, \
       sys, tempfile, traceback, stat
import apt_pkg
from dak_exceptions import *
31 ################################################################################
# Precompiled regexps used throughout this module.
re_comments = re.compile(r"\#.*")
re_no_epoch = re.compile(r"^\d+\:")
re_no_revision = re.compile(r"-[^-]+$")
re_arch_from_filename = re.compile(r"/binary-[^/]+/")
re_extract_src_version = re.compile (r"(\S+)\s*\((.*)\)")
re_isadeb = re.compile (r"(.+?)_(.+?)_(.+)\.u?deb$")
re_issource = re.compile (r"(.+)_(.+?)\.(orig\.tar\.gz|diff\.gz|tar\.gz|dsc)$")

re_single_line_field = re.compile(r"^(\S*)\s*:\s*(.*)")
re_multi_line_field = re.compile(r"^\s(.*)")
re_taint_free = re.compile(r"^[-+~/\.\w]+$")

re_parse_maintainer = re.compile(r"^\s*(\S.*\S)\s*\<([^\>]+)\>")
re_gpg_uid = re.compile('^uid.*<([^>]*)>')

# Matches "source (version)"; used to split a Source: field.
# NOTE: this was defined twice; the redundant duplicate has been dropped.
re_srchasver = re.compile(r"^(\S+)\s+\((\S+)\)$")
re_verwithext = re.compile(r"^(\d+)(?:\.(\d+))(?:\s+\((\S+)\))?$")

default_config = "/etc/dak/dak.conf"
default_apt_config = "/etc/dak/apt.conf"

# Cache of key fingerprint -> (uid, email), filled lazily elsewhere.
key_uid_email_cache = {}

# (hashname, function, earliest_changes_version)
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
                ("sha256", apt_pkg.sha256sum, (1, 8))]
63 ################################################################################
def open_file(filename, mode='r'):
    """Open `filename` in mode `mode`; raise CantOpenError (instead of a
    bare IOError) if the file cannot be opened."""
    try:
        f = open(filename, mode)
    except IOError:
        raise CantOpenError(filename)
    return f
72 ################################################################################
def our_raw_input(prompt=""):
    """Prompt on stdout and read a line from stdin; exit cleanly on ^D
    instead of dumping an EOFError traceback."""
    sys.stdout.write(prompt)
    sys.stdout.flush()
    try:
        ret = raw_input()
        return ret
    except EOFError:
        sys.stderr.write("\nUser interrupt (^D).\n")
        raise SystemExit
85 ################################################################################
def extract_component_from_section(section):
    """Split a Section field like "contrib/net" into (section, component).
    Sections without a '/' get their component looked up in the config
    (falling back to "main")."""
    component = ""

    if section.find('/') != -1:
        component = section.split('/')[0]

    # Expand default component
    if component == "":
        # Cnf is the module-global apt configuration object
        if Cnf.has_key("Component::%s" % section):
            component = section
        else:
            component = "main"

    return (section, component)
102 ################################################################################
def parse_deb822(contents, signing_rules=0):
    """Parse deb822-style `contents` (a .changes/.dsc blob) and return a
    dictionary mapping lowercased field names to their values.  The raw
    input is preserved under the "filecontents" key.  `signing_rules`
    follows the same convention as parse_changes()."""
    error = ""
    changes = {}

    # Split the lines in the input, keeping the linebreaks.
    lines = contents.splitlines(True)

    if len(lines) == 0:
        raise ParseChangesError("[Empty changes file]")

    # Reindex by line number so we can easily verify the format of
    # .dsc files...
    index = 0
    indexed_lines = {}
    for line in lines:
        index += 1
        indexed_lines[index] = line[:-1]

    inside_signature = 0

    num_of_lines = len(indexed_lines.keys())
    index = 0
    first = -1
    while index < num_of_lines:
        index += 1
        line = indexed_lines[index]
        if line == "":
            if signing_rules == 1:
                # In strict mode a blank line must be immediately
                # followed by the signature block.
                index += 1
                if index > num_of_lines:
                    raise InvalidDscError(index)
                line = indexed_lines[index]
                if not line.startswith("-----BEGIN PGP SIGNATURE"):
                    raise InvalidDscError(index)
                inside_signature = 0
                break
            else:
                continue
        if line.startswith("-----BEGIN PGP SIGNATURE"):
            break
        if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
            inside_signature = 1
            if signing_rules == 1:
                # Skip the PGP header lines up to the first blank line.
                while index < num_of_lines and line != "":
                    index += 1
                    line = indexed_lines[index]
            continue
        # If we're not inside the signed data, don't process anything
        if signing_rules >= 0 and not inside_signature:
            continue
        slf = re_single_line_field.match(line)
        if slf:
            field = slf.groups()[0].lower()
            changes[field] = slf.groups()[1]
            first = 1
            continue
        if line == " .":
            # " ." is deb822's encoding of an empty line in a multi-line
            # field value.
            changes[field] += '\n'
            continue
        mlf = re_multi_line_field.match(line)
        if mlf:
            if first == -1:
                raise ParseChangesError("'%s'\n [Multi-line field continuing on from nothing?]" % (line))
            if first == 1 and changes[field] != "":
                changes[field] += '\n'
            first = 0
            changes[field] += mlf.groups()[0] + '\n'
            continue
        error += line

    if signing_rules == 1 and inside_signature:
        raise InvalidDscError(index)

    changes["filecontents"] = "".join(lines)

    if "source" in changes:
        # Strip the source version in brackets from the source field,
        # put it in the "source-version" field instead.
        srcver = re_srchasver.search(changes["source"])
        if srcver:
            changes["source"] = srcver.group(1)
            changes["source-version"] = srcver.group(2)

    if error:
        raise ParseChangesError(error)

    return changes
192 ################################################################################
def parse_changes(filename, signing_rules=0):
    """Parses a changes file and returns a dictionary where each field is a
    key.  The mandatory first argument is the filename of the .changes
    file.

    signing_rules is an optional argument:

      o If signing_rules == -1, no signature is required.
      o If signing_rules == 0 (the default), a signature is required.
      o If signing_rules == 1, it turns on the same strict format checking
        as dpkg-source.

    The rules for (signing_rules == 1)-mode are:

      o The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
        followed by any PGP header data and must end with a blank line.

      o The data section must end with a blank line and must be followed by
        "-----BEGIN PGP SIGNATURE-----".
    """
    changes_in = open_file(filename)
    content = changes_in.read()
    changes_in.close()  # don't leak the file handle
    return parse_deb822(content, signing_rules)
220 ################################################################################
def hash_key(hashname):
    """Return the files-dict key under which the named checksum is stored
    (e.g. "md5" -> "md5sum")."""
    return hashname + 'sum'
225 ################################################################################
def create_hash(where, files, hashname, hashfunc):
    """create_hash extends the passed files dict with the given hash by
    iterating over all files on disk and passing them to the hashing
    function given.  Returns a list of rejection messages (unopenable
    files)."""
    rejmsg = []
    for f in files.keys():
        try:
            file_handle = open_file(f)
        except CantOpenError:
            rejmsg.append("Could not open file %s for checksumming" % (f))
            continue
        files[f][hash_key(hashname)] = hashfunc(file_handle)
        file_handle.close()  # avoid leaking one fd per checksummed file
    return rejmsg
244 ################################################################################
def check_hash(where, files, hashname, hashfunc):
    """check_hash checks the given hash in the files dict against the actual
    files on disk.  The hash values need to be present consistently in
    all file entries.  It does not modify its input in any way."""
    rejmsg = []
    for f in files.keys():
        try:
            file_handle = open_file(f)
        except CantOpenError:
            # TODO: This happens when the file is in the pool.
            # warn("Cannot open file %s" % f)
            continue
        try:
            # Check for the hash entry, to not trigger a KeyError.
            if hash_key(hashname) not in files[f]:
                rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
                    where))
            # Actually check the hash for correctness.
            elif hashfunc(file_handle) != files[f][hash_key(hashname)]:
                rejmsg.append("%s: %s check failed in %s" % (f, hashname,
                    where))
        finally:
            file_handle.close()  # close even when a reject was recorded
    return rejmsg
277 ################################################################################
def check_size(where, files):
    """check_size checks the file sizes in the passed files dict against the
    files on disk.  Returns a list of rejection messages for mismatches;
    files missing on disk are skipped (they may live in the pool)."""
    rejmsg = []
    for f in files.keys():
        try:
            entry = os.stat(f)
        except OSError:
            # TODO: This happens when the file is in the pool.
            continue
        actual_size = entry[stat.ST_SIZE]
        size = int(files[f]["size"])
        if size != actual_size:
            rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
                   % (f, actual_size, size, where))
    return rejmsg
300 ################################################################################
def check_hash_fields(what, manifest):
    """check_hash_fields ensures that there are no checksum fields in the
    given dict that we do not know about.  Returns a list of rejection
    messages."""
    rejmsg = []
    # Names of the hashes we support (first element of each known_hashes
    # tuple).
    hashes = [x[0] for x in known_hashes]
    for field in manifest:
        if field.startswith("checksums-"):
            hashname = field.split("-", 1)[1]
            if hashname not in hashes:
                rejmsg.append("Unsupported checksum field for %s "\
                    "in %s" % (hashname, what))
    return rejmsg
316 ################################################################################
def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
    """Ensure the .changes provides the given hash: import and verify it
    when the changes format is new enough to carry it, otherwise compute
    it ourselves.  Returns a list of rejection messages."""
    if format >= version:
        # The version should contain the specified hash.
        func = check_hash

        # Import hashes from the changes
        rejmsg = parse_checksums(".changes", files, changes, hashname)
        if len(rejmsg) > 0:
            return rejmsg
    else:
        # We need to calculate the hash because it can't possibly
        # be in the file.
        func = create_hash
    return func(".changes", files, hashname, hashfunc)
# We could add the orig which might be in the pool to the files dict to
# access the checksums easily.

def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
    """ensure_dsc_hashes' task is to ensure that each and every *present* hash
    in the dsc is correct, i.e. identical to the changes file and if necessary
    the pool.  The latter task is delegated to check_hash."""
    rejmsg = []
    if not dsc.has_key('Checksums-%s' % (hashname,)):
        # Nothing to verify for this hash.
        return rejmsg
    # Import hashes from the dsc
    parse_checksums(".dsc", dsc_files, dsc, hashname)
    # And check it...
    rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
    return rejmsg
350 ################################################################################
def ensure_hashes(changes, dsc, files, dsc_files):
    """Verify (or compute) every known hash for the .changes and, for
    source uploads, the .dsc.  Returns a list of rejection messages."""
    rejmsg = []

    # Make sure we recognise the format of the Files: field in the .changes
    format = changes.get("format", "0.0").split(".", 1)
    if len(format) == 2:
        format = int(format[0]), int(format[1])
    else:
        format = int(float(format[0])), 0

    # We need to deal with the original changes blob, as the fields we need
    # might not be in the changes dict serialised into the .dak anymore.
    orig_changes = parse_deb822(changes['filecontents'])

    # Copy the checksums over to the current changes dict.  This will keep
    # the existing modifications to it intact.
    for field in orig_changes:
        if field.startswith('checksums-'):
            changes[field] = orig_changes[field]

    # Check for unsupported hashes
    rejmsg.extend(check_hash_fields(".changes", changes))
    rejmsg.extend(check_hash_fields(".dsc", dsc))

    # We have to calculate the hash if we have an earlier changes version than
    # the hash appears in rather than require it exist in the changes file
    for hashname, hashfunc, version in known_hashes:
        rejmsg.extend(_ensure_changes_hash(changes, format, version, files,
            hashname, hashfunc))
        if "source" in changes["architecture"]:
            rejmsg.extend(_ensure_dsc_hash(dsc, dsc_files, hashname,
                hashfunc))

    return rejmsg
def parse_checksums(where, files, manifest, hashname):
    """Import the checksums-<hashname> field of `manifest` into the files
    dict and cross-check the sizes.  Returns a list of rejection messages."""
    rejmsg = []
    field = 'checksums-%s' % hashname
    if not field in manifest:
        return rejmsg
    input = manifest[field]
    for line in input.split('\n'):
        if not line:
            break
        hash, size, file = line.strip().split(' ')
        if not files.has_key(file):
            # TODO: check for the file's entry in the original files dict, not
            # the one modified by (auto)byhand and other weird stuff
            # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
            #        (file, hashname, where))
            continue
        if not files[file]["size"] == size:
            rejmsg.append("%s: size differs for files and checksums-%s entry "\
                "in %s" % (file, hashname, where))
        files[file][hash_key(hashname)] = hash
    for f in files.keys():
        if not files[f].has_key(hash_key(hashname)):
            # BUGFIX: this message used to interpolate the stale loop
            # variable `file` instead of `f`, naming the wrong file.
            rejmsg.append("%s: no entry in checksums-%s in %s" % (f,
                hashname, where))
    return rejmsg
414 ################################################################################
# Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl

def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
    """Parse the Files:-style `field` of a parsed .changes/.dsc dict into
    a dict of filename -> {size, section, priority, component, <hashname>}.
    Raises NoFilesFieldError, UnknownFormatError or ParseChangesError."""
    files = {}

    # Make sure we have a Files: field to parse...
    if not changes.has_key(field):
        raise NoFilesFieldError

    # Make sure we recognise the format of the Files: field
    format = re_verwithext.search(changes.get("format", "0.0"))
    if not format:
        raise UnknownFormatError("%s" % (changes.get("format","0.0")))

    format = format.groups()
    if format[1] == None:
        format = int(float(format[0])), 0, format[2]
    else:
        format = int(format[0]), int(format[1]), format[2]
    if format[2] == None:
        format = format[:2]

    if is_a_dsc:
        # NOTE(review): accepted .dsc format range reconstructed from
        # upstream dak — confirm against the deployed version.
        if format != (1, 0):
            raise UnknownFormatError("%s" % (changes.get("format","0.0")))
    else:
        if (format < (1,5) or format > (1,8)):
            raise UnknownFormatError("%s" % (changes.get("format","0.0")))
        if field != "files" and format < (1,8):
            raise UnknownFormatError("%s" % (changes.get("format","0.0")))

    includes_section = (not is_a_dsc) and field == "files"

    # Parse each entry/line:
    for i in changes[field].split('\n'):
        if not i:
            break
        s = i.split()
        section = priority = ""
        try:
            if includes_section:
                (md5, size, section, priority, name) = s
            else:
                (md5, size, name) = s
        except ValueError:
            raise ParseChangesError(i)

        if section == "":
            section = "-"
        if priority == "":
            priority = "-"

        (section, component) = extract_component_from_section(section)

        files[name] = Dict(size=size, section=section,
                           priority=priority, component=component)
        files[name][hashname] = md5

    return files
476 ################################################################################
def force_to_utf8(s):
    """Forces a string to UTF-8.  If the string isn't already UTF-8,
    it's assumed to be ISO-8859-1."""
    try:
        # If this decodes cleanly, s is already valid UTF-8.
        unicode(s, 'utf-8')
        return s
    except UnicodeError:
        latin1_s = unicode(s,'iso8859-1')
        return latin1_s.encode('utf-8')
def rfc2047_encode(s):
    """Encodes a (header) string per RFC2047 if necessary.  If the
    string is neither ASCII nor UTF-8, it's assumed to be ISO-8859-1."""
    try:
        # Pure ASCII needs no encoding.
        codecs.lookup('ascii')[1](s)
        return s
    except UnicodeError:
        pass
    try:
        # Valid UTF-8: encode as such.
        codecs.lookup('utf-8')[1](s)
        h = email.Header.Header(s, 'utf-8', 998)
        return str(h)
    except UnicodeError:
        # Fall back to ISO-8859-1.
        h = email.Header.Header(s, 'iso-8859-1', 998)
        return str(h)
504 ################################################################################
# <Culus> 'The standard sucks, but my tool is supposed to interoperate
#          with it. I know - I'll fix the suckage and make things
#          incompatible!'

def fix_maintainer (maintainer):
    """Parses a Maintainer or Changed-By field and returns:
    (1) an RFC822 compatible version,
    (2) an RFC2047 compatible version,
    (3) the name,
    (4) the email.

    The name is forced to UTF-8 for both (1) and (3).  If the name field
    contains '.' or ',' (as allowed by Debian policy), (1) and (2) are
    switched to 'email (name)' format."""
    maintainer = maintainer.strip()
    if not maintainer:
        return ('', '', '', '')

    if maintainer.find("<") == -1:
        # Bare address, no name.
        email = maintainer
        name = ""
    elif (maintainer[0] == "<" and maintainer[-1:] == ">"):
        email = maintainer[1:-1]
        name = ""
    else:
        m = re_parse_maintainer.match(maintainer)
        if not m:
            raise ParseMaintError("Doesn't parse as a valid Maintainer field.")
        name = m.group(1)
        email = m.group(2)

    # Get an RFC2047 compliant version of the name
    rfc2047_name = rfc2047_encode(name)

    # Force the name to be UTF-8
    name = force_to_utf8(name)

    if name.find(',') != -1 or name.find('.') != -1:
        rfc822_maint = "%s (%s)" % (email, name)
        rfc2047_maint = "%s (%s)" % (email, rfc2047_name)
    else:
        rfc822_maint = "%s <%s>" % (name, email)
        rfc2047_maint = "%s <%s>" % (rfc2047_name, email)

    if email.find("@") == -1 and email.find("buildd_") != 0:
        raise ParseMaintError("No @ found in email address part.")

    return (rfc822_maint, rfc2047_maint, name, email)
555 ################################################################################
557 # sendmail wrapper, takes _either_ a message string or a file as arguments
558 def send_mail (message, filename=""):
559 # If we've been passed a string dump it into a temporary file
561 filename = tempfile.mktemp()
562 fd = os.open(filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0700)
563 os.write (fd, message)
567 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
569 raise SendmailFailedError, output
571 # Clean up any temporary files
575 ################################################################################
def poolify (source, component):
    """Return the pool subdirectory for `source` in `component`,
    e.g. ("libfoo", "main") -> "main/libf/libfoo/".  Library packages
    are bucketed by their first four characters, others by the first."""
    if component:
        component += '/'
    if source[:3] == "lib":
        return component + source[:4] + '/' + source + '/'
    else:
        return component + source[:1] + '/' + source + '/'
585 ################################################################################
587 def move (src, dest, overwrite = 0, perms = 0664):
588 if os.path.exists(dest) and os.path.isdir(dest):
591 dest_dir = os.path.dirname(dest)
592 if not os.path.exists(dest_dir):
593 umask = os.umask(00000)
594 os.makedirs(dest_dir, 02775)
596 #print "Moving %s to %s..." % (src, dest)
597 if os.path.exists(dest) and os.path.isdir(dest):
598 dest += '/' + os.path.basename(src)
599 # Don't overwrite unless forced to
600 if os.path.exists(dest):
602 fubar("Can't move %s to %s - file already exists." % (src, dest))
604 if not os.access(dest, os.W_OK):
605 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
606 shutil.copy2(src, dest)
607 os.chmod(dest, perms)
610 def copy (src, dest, overwrite = 0, perms = 0664):
611 if os.path.exists(dest) and os.path.isdir(dest):
614 dest_dir = os.path.dirname(dest)
615 if not os.path.exists(dest_dir):
616 umask = os.umask(00000)
617 os.makedirs(dest_dir, 02775)
619 #print "Copying %s to %s..." % (src, dest)
620 if os.path.exists(dest) and os.path.isdir(dest):
621 dest += '/' + os.path.basename(src)
622 # Don't overwrite unless forced to
623 if os.path.exists(dest):
625 raise FileExistsError
627 if not os.access(dest, os.W_OK):
628 raise CantOverwriteError
629 shutil.copy2(src, dest)
630 os.chmod(dest, perms)
632 ################################################################################
# NOTE(review): the def line was lost in this dump; the name below is the
# one this body carries in upstream dak — confirm before relying on it.
def where_am_i ():
    """Return this host's configured database hostname, falling back to
    the canonical hostname."""
    res = socket.gethostbyaddr(socket.gethostname())
    database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
    if database_hostname:
        return database_hostname
    else:
        return res[0]
def which_conf_file ():
    """Return the per-host dak config path if configured, else the
    default."""
    res = socket.gethostbyaddr(socket.gethostname())
    if Cnf.get("Config::" + res[0] + "::DakConfig"):
        return Cnf["Config::" + res[0] + "::DakConfig"]
    else:
        return default_config
def which_apt_conf_file ():
    """Return the per-host apt config path if configured, else the
    default."""
    res = socket.gethostbyaddr(socket.gethostname())
    if Cnf.get("Config::" + res[0] + "::AptConfig"):
        return Cnf["Config::" + res[0] + "::AptConfig"]
    else:
        return default_apt_config
def which_alias_file():
    """Return this host's forward-alias file path, or None if it does
    not exist."""
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
    if os.path.exists(aliasfn):
        return aliasfn
    else:
        return None
664 ################################################################################
# Escape characters which have meaning to SQL's regex comparison operator ('~')
# (woefully incomplete)

def regex_safe (s):
    """Escape '+' and '.' for use in a SQL '~' regex comparison.
    The double backslash survives the extra unescaping SQL applies."""
    s = s.replace('+', '\\\\+')
    s = s.replace('.', '\\\\.')
    return s
674 ################################################################################
# Perform a substition of template
def TemplateSubst(map, filename):
    """Read the template at `filename` and replace every occurrence of
    each key of `map` with its value; return the resulting string."""
    templatefile = open_file(filename)
    template = templatefile.read()
    for x in map.keys():
        template = template.replace(x, map[x])
    templatefile.close()  # don't leak the handle
    return template
685 ################################################################################
def fubar(msg, exit_code=1):
    """Print a fatal error to stderr and terminate with `exit_code`."""
    sys.stderr.write("E: %s\n" % (msg))
    sys.exit(exit_code)
692 sys.stderr.write("W: %s\n" % (msg))
694 ################################################################################
# Returns the user name with a laughable attempt at rfc822 conformancy
# (read: removing stray periods).
def whoami ():
    """Return the current user's real name from the passwd GECOS field,
    with '.' stripped."""
    return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
701 ################################################################################
711 return ("%d%s" % (c, t))
713 ################################################################################
def cc_fix_changes (changes):
    """Replace the "architecture" string in a parsed changes dict with a
    dict mapping each architecture name to 1."""
    o = changes.get("architecture", "")
    if o:
        del changes["architecture"]
    changes["architecture"] = {}
    for j in o.split():
        changes["architecture"][j] = 1
723 # Sort by source name, source version, 'have source', and then by filename
# Sort by source name, source version, 'have source', and then by filename
def changes_compare (a, b):
    """cmp()-style comparator for two .changes filenames; unparseable
    files sort first/last respectively."""
    try:
        a_changes = parse_changes(a)
    except:
        return -1

    try:
        b_changes = parse_changes(b)
    except:
        return 1

    cc_fix_changes (a_changes)
    cc_fix_changes (b_changes)

    # Sort by source name
    a_source = a_changes.get("source")
    b_source = b_changes.get("source")
    q = cmp (a_source, b_source)
    if q:
        return q

    # Sort by source version
    a_version = a_changes.get("version", "0")
    b_version = b_changes.get("version", "0")
    q = apt_pkg.VersionCompare(a_version, b_version)
    if q:
        return q

    # Sort by 'have source'
    a_has_source = a_changes["architecture"].get("source")
    b_has_source = b_changes["architecture"].get("source")
    if a_has_source and not b_has_source:
        return -1
    elif b_has_source and not a_has_source:
        return 1

    # Fall back to sort by filename
    return cmp(a, b)
763 ################################################################################
def find_next_free (dest, too_many=100):
    """Return `dest` if it does not exist, otherwise the first free
    "dest.N" name.  Raises NoFreeFilenameError after `too_many` tries."""
    extra = 0
    orig_dest = dest
    while os.path.exists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
        extra += 1
    if extra >= too_many:
        raise NoFreeFilenameError
    return dest
775 ################################################################################
def result_join (original, sep = '\t'):
    """Join the entries of `original` with `sep`, substituting "" for
    None entries (handy for DB result rows)."""
    # Build the parts directly instead of indexing with xrange and
    # shadowing the builtin `list`.
    parts = []
    for item in original:
        if item == None:
            parts.append("")
        else:
            parts.append(item)
    return sep.join(parts)
786 ################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    """Prefix every (stripped) line of `str` with `prefix`; blank lines
    are dropped unless `include_blank_lines` is set."""
    out = ""
    for line in str.split('\n'):
        line = line.strip()
        if line or include_blank_lines:
            out += "%s%s\n" % (prefix, line)
    # Strip trailing new line
    if out:
        out = out[:-1]
    return out
799 ################################################################################
def validate_changes_file_arg(filename, require_changes=1):
    """'filename' is either a .changes or .dak file.  If 'filename' is a
    .dak file, it's changed to be the corresponding .changes file.  The
    function then checks if the .changes file a) exists and b) is
    readable and returns the .changes filename if so.  If there's a
    problem, the next action depends on the option 'require_changes'
    argument:

      o If 'require_changes' == -1, errors are ignored and the .changes
        filename is returned.
      o If 'require_changes' == 0, a warning is given and 'None' is returned.
      o If 'require_changes' == 1, a fatal error is raised.
    """
    error = None

    orig_filename = filename
    if filename.endswith(".dak"):
        filename = filename[:-4]+".changes"

    if not filename.endswith(".changes"):
        error = "invalid file type; not a changes file"
    else:
        if not os.access(filename,os.R_OK):
            if os.path.exists(filename):
                error = "permission denied"
            else:
                error = "file not found"

    if error:
        if require_changes == 1:
            fubar("%s: %s." % (orig_filename, error))
        elif require_changes == 0:
            warn("Skipping %s - %s" % (orig_filename, error))
            return None
        else: # We only care about the .dak file
            return filename
    else:
        return filename
840 ################################################################################
843 return (arch != "source" and arch != "all")
845 ################################################################################
def join_with_commas_and(list):
    """English-join a list: [] -> "nothing", [a] -> a,
    [a, b, c] -> "a, b and c"."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    return "%s and %s" % (", ".join(list[:-1]), list[-1])
852 ################################################################################
def pp_deps (deps):
    """Pretty-print a dependency alternation: each atom is a
    (pkg, version, constraint) tuple; atoms are joined with " |"."""
    pp_deps = []
    for atom in deps:
        (pkg, version, constraint) = atom
        if constraint:
            pp_dep = "%s (%s %s)" % (pkg, constraint, version)
        else:
            pp_dep = pkg
        pp_deps.append(pp_dep)
    return " |".join(pp_deps)
865 ################################################################################
870 ################################################################################
# Handle -a, -c and -s arguments; returns them as SQL constraints
def parse_args(Options):
    """Turn the Suite/Component/Architecture options into SQL "AND ..."
    constraint fragments; returns (con_suites, con_architectures,
    con_components, check_source)."""
    # Process suite
    if Options["Suite"]:
        suite_ids_list = []
        for suite in split_args(Options["Suite"]):
            suite_id = database.get_suite_id(suite)
            if suite_id == -1:
                warn("suite '%s' not recognised." % (suite))
            else:
                suite_ids_list.append(suite_id)
        if suite_ids_list:
            con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
        else:
            fubar("No valid suite given.")
    else:
        con_suites = ""

    # Process component
    if Options["Component"]:
        component_ids_list = []
        for component in split_args(Options["Component"]):
            component_id = database.get_component_id(component)
            if component_id == -1:
                warn("component '%s' not recognised." % (component))
            else:
                component_ids_list.append(component_id)
        if component_ids_list:
            con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
        else:
            fubar("No valid component given.")
    else:
        con_components = ""

    # Process architecture
    con_architectures = ""
    if Options["Architecture"]:
        arch_ids_list = []
        check_source = 0
        for architecture in split_args(Options["Architecture"]):
            if architecture == "source":
                check_source = 1
            else:
                architecture_id = database.get_architecture_id(architecture)
                if architecture_id == -1:
                    warn("architecture '%s' not recognised." % (architecture))
                else:
                    arch_ids_list.append(architecture_id)
        if arch_ids_list:
            con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
        else:
            if not check_source:
                fubar("No valid architecture given.")
    else:
        check_source = 1

    return (con_suites, con_architectures, con_components, check_source)
930 ################################################################################
932 # Inspired(tm) by Bryn Keller's print_exc_plus (See
933 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
936 tb = sys.exc_info()[2]
945 traceback.print_exc()
947 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
948 frame.f_code.co_filename,
950 for key, value in frame.f_locals.items():
951 print "\t%20s = " % key,
955 print "<unable to print>"
957 ################################################################################
959 def try_with_debug(function):
967 ################################################################################
# Function for use in sorting lists of architectures.
# Sorts normally except that 'source' dominates all others.

def arch_compare_sw (a, b):
    """cmp()-style comparator: "source" sorts before everything else."""
    if a == "source" and b == "source":
        return 0
    elif a == "source":
        return -1
    elif b == "source":
        return 1

    return cmp (a, b)
982 ################################################################################
# Split command line arguments which can be separated by either commas
# or whitespace.  If dwim is set, it will complain about string ending
# in comma since this usually means someone did 'dak ls -a i386, m68k
# foo' or something and the inevitable confusion resulting from 'm68k'
# being treated as an argument is undesirable.

def split_args (s, dwim=1):
    """Split `s` on commas if any are present, else on whitespace."""
    if s.find(",") == -1:
        return s.split()
    else:
        if s[-1:] == "," and dwim:
            fubar("split_args: found trailing comma, spurious space maybe?")
        return s.split(",")
998 ################################################################################
1000 def Dict(**dict): return dict
1002 ########################################
# Our very own version of commands.getouputstatus(), hacked to support
#  gpgv's status fd.
def gpgv_get_status_output(cmd, status_read, status_write):
    """Run `cmd` through /bin/sh, keeping `status_write` open in the
    child so gpgv can write its machine-readable status there.  Returns
    (output, status, exit_status): combined stdout/stderr text, the
    status-fd text, and the waitpid() exit status."""
    cmd = ['/bin/sh', '-c', cmd]
    p2cread, p2cwrite = os.pipe()
    c2pread, c2pwrite = os.pipe()
    errout, errin = os.pipe()
    pid = os.fork()
    if pid == 0:
        # Child: wire the pipes onto fds 0-2 and close everything else
        # except the status fd, then exec the command.
        os.close(0)
        os.close(1)
        os.dup(p2cread)
        os.dup(c2pwrite)
        os.close(2)
        os.dup(errin)
        for i in range(3, 256):
            if i != status_write:
                try:
                    os.close(i)
                except:
                    pass
        try:
            os.execvp(cmd[0], cmd)
        finally:
            os._exit(1)

    # Parent
    os.close(p2cread)
    os.dup2(c2pread, c2pwrite)
    os.dup2(errout, errin)

    output = status = ""
    while 1:
        i, o, e = select.select([c2pwrite, errin, status_read], [], [])
        more_data = []
        for fd in i:
            r = os.read(fd, 8196)
            if len(r) > 0:
                more_data.append(fd)
                if fd == c2pwrite or fd == errin:
                    output += r
                elif fd == status_read:
                    status += r
                else:
                    fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
        if not more_data:
            # All pipes hit EOF: reap the child and clean up our fds.
            pid, exit_status = os.waitpid(pid, 0)
            try:
                os.close(status_write)
                os.close(status_read)
                os.close(c2pread)
                os.close(c2pwrite)
                os.close(p2cwrite)
                os.close(errin)
                os.close(errout)
            except:
                pass
            break

    return output, status, exit_status
1066 ################################################################################
def process_gpgv_output(status):
    """Parse gpgv status-fd text into ({keyword: args-list}, error-string).
    `internal_error` is "" when every line parsed cleanly."""
    # Process the status-fd output
    keywords = {}
    internal_error = ""
    for line in status.split('\n'):
        line = line.strip()
        if line == "":
            continue
        split = line.split()
        if len(split) < 2:
            internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
            continue
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
            continue
        args = split[2:]
        # A few tokens legitimately repeat; any other duplicate is an error.
        if keyword in keywords and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
            continue
        else:
            keywords[keyword] = args

    return (keywords, internal_error)
1093 ################################################################################
def retrieve_key (filename, keyserver=None, keyring=None):
    """Retrieve the key that signed 'filename' from 'keyserver' and
    add it to 'keyring'.  Returns "" on success, or an error message
    on failure."""
    # Defaults for keyserver and keyring
    if not keyserver:
        keyserver = Cnf["Dinstall::KeyServer"]
    if not keyring:
        keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(filename):
        return "%s: tainted filename" % (filename)

    # Invoke gpgv on the file
    status_read, status_write = os.pipe();
    cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
    (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)
    if internal_error:
        return internal_error

    # Against an empty keyring we expect exactly a NO_PUBKEY telling us
    # which fingerprint to fetch.
    if not keywords.has_key("NO_PUBKEY"):
        return "didn't find expected NO_PUBKEY in gpgv status-fd output"

    fingerprint = keywords["NO_PUBKEY"][0]
    # XXX - gpg sucks.  You can't use --secret-keyring=/dev/null as
    # it'll try to create a lockfile in /dev.  A better solution might
    # be a tempfile or something.
    cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
          % (Cnf["Dinstall::SigningKeyring"])
    cmd += " --keyring %s --keyserver %s --recv-key %s" \
           % (keyring, keyserver, fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    if (result != 0):
        return "'%s' failed with exit code %s" % (cmd, result)

    return ""
1137 ################################################################################
def gpg_keyring_args(keyrings=None):
    """Return a "--keyring X --keyring Y ..." argument string for the
    given keyrings (default: the configured Dinstall keyrings)."""
    if not keyrings:
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    return " ".join(["--keyring %s" % x for x in keyrings])
1145 ################################################################################
1147 def check_signature (sig_filename, reject, data_filename="", keyrings=None, autofetch=None):
1148 """Check the signature of a file and return the fingerprint if the
1149 signature is valid or 'None' if it's not. The first argument is the
1150 filename whose signature should be checked. The second argument is a
1151 reject function and is called when an error is found. The reject()
1152 function must allow for two arguments: the first is the error message,
1153 the second is an optional prefix string. It's possible for reject()
1154 to be called more than once during an invocation of check_signature().
1155 The third argument is optional and is the name of the files the
1156 detached signature applies to. The fourth argument is optional and is
1157 a *list* of keyrings to use. 'autofetch' can either be None, True or
1158 False. If None, the default behaviour specified in the config will be
1161 # Ensure the filename contains no shell meta-characters or other badness
1162 if not re_taint_free.match(sig_filename):
1163 reject("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
1166 if data_filename and not re_taint_free.match(data_filename):
1167 reject("!!WARNING!! tainted data filename: '%s'." % (data_filename))
1171 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1173 # Autofetch the signing key if that's enabled
1174 if autofetch == None:
1175 autofetch = Cnf.get("Dinstall::KeyAutoFetch")
1177 error_msg = retrieve_key(sig_filename)
1182 # Build the command line
1183 status_read, status_write = os.pipe();
1184 cmd = "gpgv --status-fd %s %s %s %s" % (
1185 status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)
1187 # Invoke gpgv on the file
1188 (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)
1190 # Process the status-fd output
1191 (keywords, internal_error) = process_gpgv_output(status)
1193 # If we failed to parse the status-fd output, let's just whine and bail now
1195 reject("internal error while performing signature check on %s." % (sig_filename))
1196 reject(internal_error, "")
1197 reject("Please report the above errors to the Archive maintainers by replying to this mail.", "")
1201 # Now check for obviously bad things in the processed output
1202 if keywords.has_key("KEYREVOKED"):
1203 reject("The key used to sign %s has been revoked." % (sig_filename))
1205 if keywords.has_key("BADSIG"):
1206 reject("bad signature on %s." % (sig_filename))
1208 if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
1209 reject("failed to check signature on %s." % (sig_filename))
1211 if keywords.has_key("NO_PUBKEY"):
1212 args = keywords["NO_PUBKEY"]
1215 reject("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
1217 if keywords.has_key("BADARMOR"):
1218 reject("ASCII armour of signature was corrupt in %s." % (sig_filename))
1220 if keywords.has_key("NODATA"):
1221 reject("no signature found in %s." % (sig_filename))
1223 if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
1224 args = keywords["KEYEXPIRED"]
1227 reject("The key (0x%s) used to sign %s has expired." % (key, sig_filename))
1233 # Next check gpgv exited with a zero return code
1235 reject("gpgv failed while checking %s." % (sig_filename))
1237 reject(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
1239 reject(prefix_multi_line_string(output, " [GPG output:] "), "")
1242 # Sanity check the good stuff we expect
1243 if not keywords.has_key("VALIDSIG"):
1244 reject("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
1247 args = keywords["VALIDSIG"]
1249 reject("internal error while checking signature on %s." % (sig_filename))
1252 fingerprint = args[0]
1253 if not keywords.has_key("GOODSIG"):
1254 reject("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
1256 if not keywords.has_key("SIG_ID"):
1257 reject("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))
1260 # Finally ensure there's not something we don't recognise
1261 known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
1262 SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
1263 NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")
1265 for keyword in keywords.keys():
1266 if not known_keywords.has_key(keyword):
1267 reject("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
1275 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """Retrieve email addresses from gpg key uids for a given fingerprint."""
    # Serve repeated lookups from the module-level key_uid_email_cache.
    addresses = key_uid_email_cache.get(fingerprint)
    # NOTE(review): the cache-hit branch body (presumably 'return addresses')
    # and the initialisation of 'addresses' on a miss are not visible in this
    # excerpt; as shown the 'if' has no suite — confirm against full source.
    if addresses != None:
    # Ask gpg to print the key's fingerprint/uid listing, restricted to the
    # project keyrings supplied by gpg_keyring_args().
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
          % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    # Pull the address out of every 'uid ... <addr>' output line;
    # re_gpg_uid captures the text between '<' and '>'.
    for l in output.split('\n'):
        m = re_gpg_uid.match(l)
        addresses.add(m.group(1))
    # Remember the result for subsequent calls with the same fingerprint.
    key_uid_email_cache[fingerprint] = addresses
1294 ################################################################################
1296 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """Greedy word-wrap: break 'paragraph' into lines of at most 'max_length'
    characters, prefixing continuation lines with 'prefix'.  A single word
    longer than the limit is emitted on a line of its own."""
    words = paragraph.split()
    # NOTE(review): the loop header ('for word in words:'), the accumulator
    # initialisations ('s', 'line', 'have_started') and the final return are
    # not visible in this excerpt — the indented lines below are the loop
    # body; confirm against the full source.
        word_size = len(word)
        # Over-long word: flush the pending line, then the word by itself.
        if word_size > max_length:
                s += line + '\n' + prefix
            s += word + '\n' + prefix
                # Would appending this word (plus a space) overflow the line?
                new_length = len(line) + word_size + 1
                if new_length > max_length:
                    s += line + '\n' + prefix
1327 ################################################################################
# Turn the absolute symlink target 'src' for a link located at 'dest' into a
# relative one, with both paths interpreted under 'root'.
# Returns the adjusted target path.
def clean_symlink (src, dest, root):
    # Strip the leading 'root' component from both paths (first match only).
    rel_src = src.replace(root, '', 1)
    rel_dest = dest.replace(root, '', 1)
    # One '../' for every directory component containing the link itself.
    depth = len(os.path.dirname(rel_dest).split('/'))
    return '../' * depth + rel_src
1338 ################################################################################
def temp_filename(directory=None, dotprefix=None, perms=0700):
    """Return a secure and unique filename by pre-creating it.

    If 'directory' is non-null, it will be the directory the file is
    pre-created in.  If 'dotprefix' is non-null, the filename will be
    prefixed with a '.'.
    """
    # NOTE(review): the guards ('if directory:', 'if dotprefix:'), the
    # os.close() of the descriptor and the final 'return filename' are not
    # visible in this excerpt — confirm against the full source.
    # Temporarily point the tempfile module at the requested directory.
        old_tempdir = tempfile.tempdir
        tempfile.tempdir = directory
    filename = tempfile.mktemp()
        # Hide the file by dotting its basename.
        filename = "%s/.%s" % (os.path.dirname(filename), os.path.basename(filename))
    # Pre-create with O_EXCL so no other process can race us for the name.
    fd = os.open(filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, perms)
        # Restore the module-wide temporary directory.
        tempfile.tempdir = old_tempdir
1361 ################################################################################
# checks if the user part of the email is listed in the alias file
def is_email_alias(email):
    """Return whether the local part of 'email' (text before '@') is a key
    in the system alias file located by which_alias_file()."""
    # NOTE(review): 'global alias_cache', its initialisation to an empty
    # set, and an 'if aliasfn:' guard appear to be elided from this excerpt
    # — confirm against the full source.
    if alias_cache == None:
        # First call: populate the cache with every alias name, i.e. the
        # text before the ':' on each line of the alias file.
        aliasfn = which_alias_file()
            for l in open(aliasfn):
                alias_cache.add(l.split(':')[0])
    # Compare only the user part of the address.
    uid = email.split('@')[0]
    return uid in alias_cache
1376 ################################################################################
# Module initialisation: load the dak configuration into the global 'Cnf'.
# Defaults from 'default_config' are read first, then overridden by the
# per-installation file when which_conf_file() points somewhere else.
# NOTE(review): surrounding initialisation (e.g. an apt_pkg.init() call) is
# not visible in this excerpt — confirm against the full source.
Cnf = apt_pkg.newConfiguration()
apt_pkg.ReadConfigFileISC(Cnf,default_config)

if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1386 ################################################################################