4 # Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
6 ################################################################################
8 # This program is free software; you can redistribute it and/or modify
9 # it under the terms of the GNU General Public License as published by
10 # the Free Software Foundation; either version 2 of the License, or
11 # (at your option) any later version.
13 # This program is distributed in the hope that it will be useful,
14 # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 # GNU General Public License for more details.
18 # You should have received a copy of the GNU General Public License
19 # along with this program; if not, write to the Free Software
20 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
22 ################################################################################
24 import codecs, commands, email.Header, os, pwd, re, select, socket, shutil, \
25 sys, tempfile, traceback, stat
28 from dak_exceptions import *
30 ################################################################################
# Precompiled regular expressions used throughout dak.
re_comments = re.compile(r"\#.*")
re_no_epoch = re.compile(r"^\d+\:")
re_no_revision = re.compile(r"-[^-]+$")
re_arch_from_filename = re.compile(r"/binary-[^/]+/")
re_extract_src_version = re.compile (r"(\S+)\s*\((.*)\)")
re_isadeb = re.compile (r"(.+?)_(.+?)_(.+)\.u?deb$")
re_issource = re.compile (r"(.+)_(.+?)\.(orig\.tar\.gz|diff\.gz|tar\.gz|dsc)$")

re_single_line_field = re.compile(r"^(\S*)\s*:\s*(.*)")
re_multi_line_field = re.compile(r"^\s(.*)")
re_taint_free = re.compile(r"^[-+~/\.\w]+$")

re_parse_maintainer = re.compile(r"^\s*(\S.*\S)\s*\<([^\>]+)\>")
re_gpg_uid = re.compile('^uid.*<([^>]*)>')

# "source (version)" as found in e.g. a .changes Source: field.
# (Was accidentally defined twice; collapsed to a single definition.)
re_srchasver = re.compile(r"^(\S+)\s+\((\S+)\)$")

# Format field: "major[.minor][ (suite)]".  The minor group must be
# optional so a bare "1" still parses -- build_file_list() explicitly
# handles format[1] == None.
re_verwithext = re.compile(r"^(\d+)(?:\.(\d+))?(?:\s+\((\S+)\))?$")
# Paths of the default dak and apt configuration files.
default_config = "/etc/dak/dak.conf"
default_apt_config = "/etc/dak/apt.conf"

# Cache mapping key fingerprint -> set of uid email addresses
# (filled lazily by gpg_get_key_addresses).
key_uid_email_cache = {}

# (hashname, function, earliest_changes_version)
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
                ("sha256", apt_pkg.sha256sum, (1, 8))]
62 ################################################################################
64 def open_file(filename, mode='r'):
66 f = open(filename, mode)
68 raise CantOpenError, filename
71 ################################################################################
73 def our_raw_input(prompt=""):
75 sys.stdout.write(prompt)
81 sys.stderr.write("\nUser interrupt (^D).\n")
84 ################################################################################
86 def extract_component_from_section(section):
89 if section.find('/') != -1:
90 component = section.split('/')[0]
92 # Expand default component
94 if Cnf.has_key("Component::%s" % section):
99 return (section, component)
101 ################################################################################
103 def parse_changes(filename, signing_rules=0):
104 """Parses a changes file and returns a dictionary where each field is a
105 key. The mandatory first argument is the filename of the .changes
108 signing_rules is an optional argument:
110 o If signing_rules == -1, no signature is required.
111 o If signing_rules == 0 (the default), a signature is required.
112 o If signing_rules == 1, it turns on the same strict format checking
115 The rules for (signing_rules == 1)-mode are:
117 o The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
118 followed by any PGP header data and must end with a blank line.
120 o The data section must end with a blank line and must be followed by
121 "-----BEGIN PGP SIGNATURE-----".
127 changes_in = open_file(filename)
128 lines = changes_in.readlines()
131 raise ParseChangesError, "[Empty changes file]"
133 # Reindex by line number so we can easily verify the format of
139 indexed_lines[index] = line[:-1]
143 num_of_lines = len(indexed_lines.keys())
146 while index < num_of_lines:
148 line = indexed_lines[index]
150 if signing_rules == 1:
152 if index > num_of_lines:
153 raise InvalidDscError, index
154 line = indexed_lines[index]
155 if not line.startswith("-----BEGIN PGP SIGNATURE"):
156 raise InvalidDscError, index
161 if line.startswith("-----BEGIN PGP SIGNATURE"):
163 if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
165 if signing_rules == 1:
166 while index < num_of_lines and line != "":
168 line = indexed_lines[index]
170 # If we're not inside the signed data, don't process anything
171 if signing_rules >= 0 and not inside_signature:
173 slf = re_single_line_field.match(line)
175 field = slf.groups()[0].lower()
176 changes[field] = slf.groups()[1]
180 changes[field] += '\n'
182 mlf = re_multi_line_field.match(line)
185 raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
186 if first == 1 and changes[field] != "":
187 changes[field] += '\n'
189 changes[field] += mlf.groups()[0] + '\n'
193 if signing_rules == 1 and inside_signature:
194 raise InvalidDscError, index
197 changes["filecontents"] = "".join(lines)
199 if changes.has_key("source"):
200 # Strip the source version in brackets from the source field,
201 # put it in the "source-version" field instead.
202 srcver = re_srchasver.search(changes["source"])
204 changes["source"] = srcver.group(1)
205 changes["source-version"] = srcver.group(2)
208 raise ParseChangesError, error
212 ################################################################################
214 def create_hash (lfiles, key, testfn, basedict = None):
216 for f in lfiles.keys():
218 file_handle = open_file(f)
219 except CantOpenError:
220 rejmsg.append("Could not open file %s for checksumming" % (f))
223 if basedict and basedict.has_key(f):
224 basedict[f]['%ssum' % key] = testfn(file_handle)
229 ################################################################################
231 def check_hash (where, lfiles, key, testfn, basedict = None):
234 for f in basedict.keys():
236 rejmsg.append("%s: no %s checksum" % (f, key))
238 for f in lfiles.keys():
239 if basedict and f not in basedict:
240 rejmsg.append("%s: extraneous entry in %s checksums" % (f, key))
243 file_handle = open_file(f)
244 except CantOpenError:
248 if testfn(file_handle) != lfiles[f][key]:
249 rejmsg.append("%s: %s check failed." % (f, key))
251 # Store the hashes for later use
253 basedict[f]['%ssum' % key] = lfiles[f][key]
255 actual_size = os.stat(f)[stat.ST_SIZE]
256 size = int(lfiles[f]["size"])
257 if size != actual_size:
258 rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
259 % (f, actual_size, size, where))
263 ################################################################################
265 def ensure_hashes(changes, dsc, files, dsc_files):
266 # Make sure we recognise the format of the Files: field
267 format = changes.get("format", "0.0").split(".",1)
269 format = int(format[0]), int(format[1])
271 format = int(float(format[0])), 0
275 if x.startswith("checksum-"):
276 h = x.split("-",1)[1]
277 if h not in dict(known_hashes):
278 rejmsg.append("Unsupported checksum field in .changes" % (h))
281 if x.startswith("checksum-"):
282 h = x.split("-",1)[1]
283 if h not in dict(known_hashes):
284 rejmsg.append("Unsupported checksum field in .dsc" % (h))
287 fs_m = build_file_list(changes, 0)
288 fs_md = build_file_list(dsc, 1)
290 # We have to calculate the hash if we have an earlier changes version than
291 # the hash appears in rather than require it exist in the changes file
292 # I hate backwards compatibility
293 for h,f,v in known_hashes:
297 for m in create_hash(fs_m, h, f, files):
300 fs = build_file_list(changes, 0, "checksums-%s" % h, h)
301 for m in check_hash(".changes %s" % (h), fs, h, f, files):
303 except NoFilesFieldError:
304 rejmsg.append("No Checksums-%s: field in .changes" % (h))
305 except UnknownFormatError, format:
306 rejmsg.append("%s: unknown format of .changes" % (format))
307 except ParseChangesError, line:
308 rejmsg.append("parse error for Checksums-%s in .changes, can't grok: %s." % (h, line))
310 if "source" not in changes["architecture"]: continue
314 for m in create_hash(fs_md, h, f, dsc_files):
317 fs = build_file_list(dsc, 1, "checksums-%s" % h, h)
318 for m in check_hash(".dsc %s" % (h), fs, h, f, dsc_files):
320 except UnknownFormatError, format:
321 rejmsg.append("%s: unknown format of .dsc" % (format))
322 except NoFilesFieldError:
323 rejmsg.append("No Checksums-%s: field in .dsc" % (h))
324 except ParseChangesError, line:
325 rejmsg.append("parse error for Checksums-%s in .dsc, can't grok: %s." % (h, line))
329 ################################################################################
331 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
333 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
336 # Make sure we have a Files: field to parse...
337 if not changes.has_key(field):
338 raise NoFilesFieldError
340 # Make sure we recognise the format of the Files: field
341 format = re_verwithext.search(changes.get("format", "0.0"))
343 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
345 format = format.groups()
346 if format[1] == None:
347 format = int(float(format[0])), 0, format[2]
349 format = int(format[0]), int(format[1]), format[2]
350 if format[2] == None:
355 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
357 if (format < (1,5) or format > (1,8)):
358 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
359 if field != "files" and format < (1,8):
360 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
362 includes_section = (not is_a_dsc) and field == "files"
364 # Parse each entry/line:
365 for i in changes[field].split('\n'):
369 section = priority = ""
372 (md5, size, section, priority, name) = s
374 (md5, size, name) = s
376 raise ParseChangesError, i
383 (section, component) = extract_component_from_section(section)
385 files[name] = Dict(size=size, section=section,
386 priority=priority, component=component)
387 files[name][hashname] = md5
391 ################################################################################
393 def force_to_utf8(s):
394 """Forces a string to UTF-8. If the string isn't already UTF-8,
395 it's assumed to be ISO-8859-1."""
400 latin1_s = unicode(s,'iso8859-1')
401 return latin1_s.encode('utf-8')
403 def rfc2047_encode(s):
404 """Encodes a (header) string per RFC2047 if necessary. If the
405 string is neither ASCII nor UTF-8, it's assumed to be ISO-8859-1."""
407 codecs.lookup('ascii')[1](s)
412 codecs.lookup('utf-8')[1](s)
413 h = email.Header.Header(s, 'utf-8', 998)
416 h = email.Header.Header(s, 'iso-8859-1', 998)
419 ################################################################################
421 # <Culus> 'The standard sucks, but my tool is supposed to interoperate
422 # with it. I know - I'll fix the suckage and make things
425 def fix_maintainer (maintainer):
426 """Parses a Maintainer or Changed-By field and returns:
427 (1) an RFC822 compatible version,
428 (2) an RFC2047 compatible version,
432 The name is forced to UTF-8 for both (1) and (3). If the name field
433 contains '.' or ',' (as allowed by Debian policy), (1) and (2) are
434 switched to 'email (name)' format."""
435 maintainer = maintainer.strip()
437 return ('', '', '', '')
439 if maintainer.find("<") == -1:
442 elif (maintainer[0] == "<" and maintainer[-1:] == ">"):
443 email = maintainer[1:-1]
446 m = re_parse_maintainer.match(maintainer)
448 raise ParseMaintError, "Doesn't parse as a valid Maintainer field."
452 # Get an RFC2047 compliant version of the name
453 rfc2047_name = rfc2047_encode(name)
455 # Force the name to be UTF-8
456 name = force_to_utf8(name)
458 if name.find(',') != -1 or name.find('.') != -1:
459 rfc822_maint = "%s (%s)" % (email, name)
460 rfc2047_maint = "%s (%s)" % (email, rfc2047_name)
462 rfc822_maint = "%s <%s>" % (name, email)
463 rfc2047_maint = "%s <%s>" % (rfc2047_name, email)
465 if email.find("@") == -1 and email.find("buildd_") != 0:
466 raise ParseMaintError, "No @ found in email address part."
468 return (rfc822_maint, rfc2047_maint, name, email)
470 ################################################################################
472 # sendmail wrapper, takes _either_ a message string or a file as arguments
473 def send_mail (message, filename=""):
474 # If we've been passed a string dump it into a temporary file
476 filename = tempfile.mktemp()
477 fd = os.open(filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0700)
478 os.write (fd, message)
482 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
484 raise SendmailFailedError, output
486 # Clean up any temporary files
490 ################################################################################
def poolify (source, component):
    """Return the pool subdirectory for 'source' within 'component',
    e.g. "main/libf/libfoo/".  Library packages hash on their first four
    characters ("libX"), everything else on the first character."""
    if component:
        component += '/'
    if source[:3] == "lib":
        return component + source[:4] + '/' + source + '/'
    else:
        return component + source[:1] + '/' + source + '/'
500 ################################################################################
502 def move (src, dest, overwrite = 0, perms = 0664):
503 if os.path.exists(dest) and os.path.isdir(dest):
506 dest_dir = os.path.dirname(dest)
507 if not os.path.exists(dest_dir):
508 umask = os.umask(00000)
509 os.makedirs(dest_dir, 02775)
511 #print "Moving %s to %s..." % (src, dest)
512 if os.path.exists(dest) and os.path.isdir(dest):
513 dest += '/' + os.path.basename(src)
514 # Don't overwrite unless forced to
515 if os.path.exists(dest):
517 fubar("Can't move %s to %s - file already exists." % (src, dest))
519 if not os.access(dest, os.W_OK):
520 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
521 shutil.copy2(src, dest)
522 os.chmod(dest, perms)
525 def copy (src, dest, overwrite = 0, perms = 0664):
526 if os.path.exists(dest) and os.path.isdir(dest):
529 dest_dir = os.path.dirname(dest)
530 if not os.path.exists(dest_dir):
531 umask = os.umask(00000)
532 os.makedirs(dest_dir, 02775)
534 #print "Copying %s to %s..." % (src, dest)
535 if os.path.exists(dest) and os.path.isdir(dest):
536 dest += '/' + os.path.basename(src)
537 # Don't overwrite unless forced to
538 if os.path.exists(dest):
540 raise FileExistsError
542 if not os.access(dest, os.W_OK):
543 raise CantOverwriteError
544 shutil.copy2(src, dest)
545 os.chmod(dest, perms)
547 ################################################################################
550 res = socket.gethostbyaddr(socket.gethostname())
551 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
552 if database_hostname:
553 return database_hostname
def which_conf_file ():
    """Return the host-specific dak config file if one is configured for
    this host's canonical name, otherwise default_config."""
    # gethostbyaddr gives (canonical_hostname, aliases, addresses).
    res = socket.gethostbyaddr(socket.gethostname())
    # Cnf is the module-level apt_pkg configuration initialised at import time.
    if Cnf.get("Config::" + res[0] + "::DakConfig"):
        return Cnf["Config::" + res[0] + "::DakConfig"]
    else:
        return default_config
def which_apt_conf_file ():
    """Return the host-specific apt config file if one is configured for
    this host's canonical name, otherwise default_apt_config."""
    res = socket.gethostbyaddr(socket.gethostname())
    if Cnf.get("Config::" + res[0] + "::AptConfig"):
        return Cnf["Config::" + res[0] + "::AptConfig"]
    else:
        return default_apt_config
def which_alias_file():
    """Return the per-host forward-alias file path if it exists,
    otherwise None."""
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
    if os.path.exists(aliasfn):
        return aliasfn
    else:
        return None
579 ################################################################################
581 # Escape characters which have meaning to SQL's regex comparison operator ('~')
582 # (woefully incomplete)
585 s = s.replace('+', '\\\\+')
586 s = s.replace('.', '\\\\.')
589 ################################################################################
# Perform a substitution of template
def TemplateSubst(map, filename):
    """Read template 'filename' and replace every occurrence of each key
    of 'map' with its value; return the substituted text."""
    templatefile = open_file(filename)
    template = templatefile.read()
    for x in map.keys():
        template = template.replace(x, map[x])
    templatefile.close()
    return template
600 ################################################################################
def fubar(msg, exit_code=1):
    """Print an error message to stderr and terminate with 'exit_code'."""
    sys.stderr.write("E: %s\n" % (msg))
    sys.exit(exit_code)

def warn(msg):
    """Print a warning message to stderr (does not exit)."""
    sys.stderr.write("W: %s\n" % (msg))
609 ################################################################################
# Returns the user name with a laughable attempt at rfc822 conformancy
# (read: removing stray periods).
def whoami ():
    """Return the current user's full name from the GECOS passwd field,
    with periods stripped for (rough) RFC822 compatibility."""
    return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
616 ################################################################################
626 return ("%d%s" % (c, t))
628 ################################################################################
def cc_fix_changes (changes):
    """Canonicalise the "architecture" entry of a parsed changes dict:
    replace the whitespace-separated string with a dict keyed on each
    architecture name (value 1), in place."""
    o = changes.get("architecture", "")
    if o != "":
        del changes["architecture"]
    changes["architecture"] = {}
    for j in o.split():
        changes["architecture"][j] = 1
638 # Sort by source name, source version, 'have source', and then by filename
639 def changes_compare (a, b):
641 a_changes = parse_changes(a)
646 b_changes = parse_changes(b)
650 cc_fix_changes (a_changes)
651 cc_fix_changes (b_changes)
653 # Sort by source name
654 a_source = a_changes.get("source")
655 b_source = b_changes.get("source")
656 q = cmp (a_source, b_source)
660 # Sort by source version
661 a_version = a_changes.get("version", "0")
662 b_version = b_changes.get("version", "0")
663 q = apt_pkg.VersionCompare(a_version, b_version)
667 # Sort by 'have source'
668 a_has_source = a_changes["architecture"].get("source")
669 b_has_source = b_changes["architecture"].get("source")
670 if a_has_source and not b_has_source:
672 elif b_has_source and not a_has_source:
675 # Fall back to sort by filename
678 ################################################################################
def find_next_free (dest, too_many=100):
    """Return 'dest' if it does not exist yet; otherwise probe
    'dest.0', 'dest.1', ... and return the first free name.  Raises
    NoFreeFilenameError after 'too_many' attempts."""
    extra = 0
    orig_dest = dest
    while os.path.exists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
        extra += 1
    if extra >= too_many:
        raise NoFreeFilenameError
    return dest
690 ################################################################################
def result_join (original, sep = '\t'):
    """Join the elements of 'original' with 'sep', substituting the
    empty string for None entries."""
    # Iterate directly instead of xrange/indexing; use 'is None' rather
    # than '== None'; avoid shadowing the builtin 'list'.
    parts = []
    for item in original:
        if item is None:
            parts.append("")
        else:
            parts.append(item)
    return sep.join(parts)
701 ################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    """Prepend 'prefix' to every line of 'str'.  Lines are stripped of
    surrounding whitespace; blank lines are dropped unless
    'include_blank_lines' is set.  The trailing newline is removed."""
    out = ""
    for line in str.split('\n'):
        line = line.strip()
        if line or include_blank_lines:
            out += "%s%s\n" % (prefix, line)
    # Strip trailing new line
    if out:
        out = out[:-1]
    return out
def validate_changes_file_arg(filename, require_changes=1):
    """'filename' is either a .changes or .dak file.  If 'filename' is a
    .dak file, it's changed to be the corresponding .changes file.  The
    function then checks if the .changes file a) exists and b) is
    readable and returns the .changes filename if so.  If there's a
    problem, the next action depends on the option 'require_changes'
    argument:

     o If 'require_changes' == -1, errors are ignored and the .changes
                                   filename is returned.
     o If 'require_changes' == 0, a warning is given and 'None' is returned.
     o If 'require_changes' == 1, a fatal error is raised.
    """
    error = None

    orig_filename = filename
    if filename.endswith(".dak"):
        filename = filename[:-4]+".changes"

    if not filename.endswith(".changes"):
        error = "invalid file type; not a changes file"
    else:
        if not os.access(filename, os.R_OK):
            if os.path.exists(filename):
                error = "permission denied"
            else:
                error = "file not found"

    if error:
        if require_changes == 1:
            fubar("%s: %s." % (orig_filename, error))
        elif require_changes == 0:
            warn("Skipping %s - %s" % (orig_filename, error))
            return None
        else: # We only care about the .dak file
            return filename
    else:
        return filename
758 return (arch != "source" and arch != "all")
760 ################################################################################
def join_with_commas_and(list):
    """Render a list as English prose: [] -> "nothing", ["a"] -> "a",
    ["a", "b", "c"] -> "a, b and c"."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    return ", ".join(list[:-1]) + " and " + list[-1]
767 ################################################################################
def pp_deps (deps):
    """Pretty-print a dependency list.  Each atom of 'deps' is a
    (package, version, constraint) tuple; atoms with an empty constraint
    render as just the package name.  Alternatives are joined with " |"."""
    pp_deps = []
    for atom in deps:
        (pkg, version, constraint) = atom
        if constraint:
            pp_dep = "%s (%s %s)" % (pkg, constraint, version)
        else:
            pp_dep = pkg
        pp_deps.append(pp_dep)
    return " |".join(pp_deps)
780 ################################################################################
785 ################################################################################
787 # Handle -a, -c and -s arguments; returns them as SQL constraints
788 def parse_args(Options):
792 for suite in split_args(Options["Suite"]):
793 suite_id = database.get_suite_id(suite)
795 warn("suite '%s' not recognised." % (suite))
797 suite_ids_list.append(suite_id)
799 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
801 fubar("No valid suite given.")
806 if Options["Component"]:
807 component_ids_list = []
808 for component in split_args(Options["Component"]):
809 component_id = database.get_component_id(component)
810 if component_id == -1:
811 warn("component '%s' not recognised." % (component))
813 component_ids_list.append(component_id)
814 if component_ids_list:
815 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
817 fubar("No valid component given.")
821 # Process architecture
822 con_architectures = ""
823 if Options["Architecture"]:
826 for architecture in split_args(Options["Architecture"]):
827 if architecture == "source":
830 architecture_id = database.get_architecture_id(architecture)
831 if architecture_id == -1:
832 warn("architecture '%s' not recognised." % (architecture))
834 arch_ids_list.append(architecture_id)
836 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
839 fubar("No valid architecture given.")
843 return (con_suites, con_architectures, con_components, check_source)
845 ################################################################################
847 # Inspired(tm) by Bryn Keller's print_exc_plus (See
848 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
851 tb = sys.exc_info()[2]
860 traceback.print_exc()
862 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
863 frame.f_code.co_filename,
865 for key, value in frame.f_locals.items():
866 print "\t%20s = " % key,
870 print "<unable to print>"
872 ################################################################################
874 def try_with_debug(function):
882 ################################################################################
884 # Function for use in sorting lists of architectures.
885 # Sorts normally except that 'source' dominates all others.
887 def arch_compare_sw (a, b):
888 if a == "source" and b == "source":
897 ################################################################################
899 # Split command line arguments which can be separated by either commas
900 # or whitespace. If dwim is set, it will complain about string ending
901 # in comma since this usually means someone did 'dak ls -a i386, m68k
902 # foo' or something and the inevitable confusion resulting from 'm68k'
903 # being treated as an argument is undesirable.
def split_args (s, dwim=1):
    """Split 's' on commas if any are present, otherwise on whitespace.
    With 'dwim' set, a trailing comma (usually a stray space in a comma
    separated list, e.g. "i386, m68k") is a fatal error."""
    if s.find(",") == -1:
        return s.split()
    else:
        if s[-1:] == "," and dwim:
            fubar("split_args: found trailing comma, spurious space maybe?")
        return s.split(",")
913 ################################################################################
def Dict(**kwargs):
    """Return the given keyword arguments as a plain dictionary."""
    return kwargs
917 ########################################
919 # Our very own version of commands.getouputstatus(), hacked to support
921 def gpgv_get_status_output(cmd, status_read, status_write):
922 cmd = ['/bin/sh', '-c', cmd]
923 p2cread, p2cwrite = os.pipe()
924 c2pread, c2pwrite = os.pipe()
925 errout, errin = os.pipe()
935 for i in range(3, 256):
936 if i != status_write:
942 os.execvp(cmd[0], cmd)
948 os.dup2(c2pread, c2pwrite)
949 os.dup2(errout, errin)
953 i, o, e = select.select([c2pwrite, errin, status_read], [], [])
956 r = os.read(fd, 8196)
959 if fd == c2pwrite or fd == errin:
961 elif fd == status_read:
964 fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
966 pid, exit_status = os.waitpid(pid, 0)
968 os.close(status_write)
969 os.close(status_read)
979 return output, status, exit_status
981 ################################################################################
def process_gpgv_output(status):
    """Parse gpgv --status-fd output into ({keyword: args}, error_string).
    Each valid status line looks like "[GNUPG:] KEYWORD ARG...".
    Malformed or unexpectedly duplicated lines are described in the
    returned error string, which is empty when everything parsed cleanly."""
    # Process the status-fd output
    keywords = {}
    internal_error = ""
    for line in status.split('\n'):
        line = line.strip()
        if line == "":
            continue
        split = line.split()
        if len(split) < 2:
            internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
            continue
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
            continue
        args = split[2:]
        # These tokens may legitimately appear more than once per run.
        # ('keyword in keywords' instead of has_key(): same semantics,
        # works on both python 2 and 3.)
        if keyword in keywords and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
            continue
        else:
            keywords[keyword] = args
    return (keywords, internal_error)
1008 ################################################################################
1010 def retrieve_key (filename, keyserver=None, keyring=None):
1011 """Retrieve the key that signed 'filename' from 'keyserver' and
1012 add it to 'keyring'. Returns nothing on success, or an error message
1015 # Defaults for keyserver and keyring
1017 keyserver = Cnf["Dinstall::KeyServer"]
1019 keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
1021 # Ensure the filename contains no shell meta-characters or other badness
1022 if not re_taint_free.match(filename):
1023 return "%s: tainted filename" % (filename)
1025 # Invoke gpgv on the file
1026 status_read, status_write = os.pipe();
1027 cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1028 (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1030 # Process the status-fd output
1031 (keywords, internal_error) = process_gpgv_output(status)
1033 return internal_error
1035 if not keywords.has_key("NO_PUBKEY"):
1036 return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1038 fingerprint = keywords["NO_PUBKEY"][0]
1039 # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
1040 # it'll try to create a lockfile in /dev. A better solution might
1041 # be a tempfile or something.
1042 cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1043 % (Cnf["Dinstall::SigningKeyring"])
1044 cmd += " --keyring %s --keyserver %s --recv-key %s" \
1045 % (keyring, keyserver, fingerprint)
1046 (result, output) = commands.getstatusoutput(cmd)
1048 return "'%s' failed with exit code %s" % (cmd, result)
1052 ################################################################################
def gpg_keyring_args(keyrings=None):
    """Return a "--keyring FILE" argument string for each keyring in
    'keyrings', defaulting to the Dinstall::GPGKeyring list from the
    dak configuration."""
    if not keyrings:
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
    return " ".join(["--keyring %s" % x for x in keyrings])
1060 ################################################################################
1062 def check_signature (sig_filename, reject, data_filename="", keyrings=None, autofetch=None):
1063 """Check the signature of a file and return the fingerprint if the
1064 signature is valid or 'None' if it's not. The first argument is the
1065 filename whose signature should be checked. The second argument is a
1066 reject function and is called when an error is found. The reject()
1067 function must allow for two arguments: the first is the error message,
1068 the second is an optional prefix string. It's possible for reject()
1069 to be called more than once during an invocation of check_signature().
1070 The third argument is optional and is the name of the files the
1071 detached signature applies to. The fourth argument is optional and is
1072 a *list* of keyrings to use. 'autofetch' can either be None, True or
1073 False. If None, the default behaviour specified in the config will be
1076 # Ensure the filename contains no shell meta-characters or other badness
1077 if not re_taint_free.match(sig_filename):
1078 reject("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
1081 if data_filename and not re_taint_free.match(data_filename):
1082 reject("!!WARNING!! tainted data filename: '%s'." % (data_filename))
1086 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1088 # Autofetch the signing key if that's enabled
1089 if autofetch == None:
1090 autofetch = Cnf.get("Dinstall::KeyAutoFetch")
1092 error_msg = retrieve_key(sig_filename)
1097 # Build the command line
1098 status_read, status_write = os.pipe();
1099 cmd = "gpgv --status-fd %s %s %s %s" % (
1100 status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)
1102 # Invoke gpgv on the file
1103 (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)
1105 # Process the status-fd output
1106 (keywords, internal_error) = process_gpgv_output(status)
1108 # If we failed to parse the status-fd output, let's just whine and bail now
1110 reject("internal error while performing signature check on %s." % (sig_filename))
1111 reject(internal_error, "")
1112 reject("Please report the above errors to the Archive maintainers by replying to this mail.", "")
1116 # Now check for obviously bad things in the processed output
1117 if keywords.has_key("KEYREVOKED"):
1118 reject("The key used to sign %s has been revoked." % (sig_filename))
1120 if keywords.has_key("BADSIG"):
1121 reject("bad signature on %s." % (sig_filename))
1123 if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
1124 reject("failed to check signature on %s." % (sig_filename))
1126 if keywords.has_key("NO_PUBKEY"):
1127 args = keywords["NO_PUBKEY"]
1130 reject("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
1132 if keywords.has_key("BADARMOR"):
1133 reject("ASCII armour of signature was corrupt in %s." % (sig_filename))
1135 if keywords.has_key("NODATA"):
1136 reject("no signature found in %s." % (sig_filename))
1138 if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
1139 args = keywords["KEYEXPIRED"]
1142 reject("The key (0x%s) used to sign %s has expired." % (key, sig_filename))
1148 # Next check gpgv exited with a zero return code
1150 reject("gpgv failed while checking %s." % (sig_filename))
1152 reject(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
1154 reject(prefix_multi_line_string(output, " [GPG output:] "), "")
1157 # Sanity check the good stuff we expect
1158 if not keywords.has_key("VALIDSIG"):
1159 reject("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
1162 args = keywords["VALIDSIG"]
1164 reject("internal error while checking signature on %s." % (sig_filename))
1167 fingerprint = args[0]
1168 if not keywords.has_key("GOODSIG"):
1169 reject("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
1171 if not keywords.has_key("SIG_ID"):
1172 reject("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))
1175 # Finally ensure there's not something we don't recognise
1176 known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
1177 SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
1178 NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")
1180 for keyword in keywords.keys():
1181 if not known_keywords.has_key(keyword):
1182 reject("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
1190 ################################################################################
1192 def gpg_get_key_addresses(fingerprint):
1193 """retreive email addresses from gpg key uids for a given fingerprint"""
1194 addresses = key_uid_email_cache.get(fingerprint)
1195 if addresses != None:
1198 cmd = "gpg --no-default-keyring %s --fingerprint %s" \
1199 % (gpg_keyring_args(), fingerprint)
1200 (result, output) = commands.getstatusoutput(cmd)
1202 for l in output.split('\n'):
1203 m = re_gpg_uid.match(l)
1205 addresses.add(m.group(1))
1206 key_uid_email_cache[fingerprint] = addresses
1209 ################################################################################
1211 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
1213 def wrap(paragraph, max_length, prefix=""):
1217 words = paragraph.split()
1220 word_size = len(word)
1221 if word_size > max_length:
1223 s += line + '\n' + prefix
1224 s += word + '\n' + prefix
1227 new_length = len(line) + word_size + 1
1228 if new_length > max_length:
1229 s += line + '\n' + prefix
1242 ################################################################################
# Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
# Returns fixed 'src'
def clean_symlink (src, dest, root):
    """Rewrite the absolute link target 'src' so it is relative to the
    directory containing 'dest'; both paths are first stripped of their
    leading 'root' prefix."""
    relative_src = src.replace(root, '', 1)
    target_dir = os.path.dirname(dest.replace(root, '', 1))
    # One "../" per path component of the destination directory.
    return '../' * len(target_dir.split('/')) + relative_src
1253 ################################################################################
1255 def temp_filename(directory=None, dotprefix=None, perms=0700):
1256 """Return a secure and unique filename by pre-creating it.
1257 If 'directory' is non-null, it will be the directory the file is pre-created in.
1258 If 'dotprefix' is non-null, the filename will be prefixed with a '.'."""
1261 old_tempdir = tempfile.tempdir
1262 tempfile.tempdir = directory
1264 filename = tempfile.mktemp()
1267 filename = "%s/.%s" % (os.path.dirname(filename), os.path.basename(filename))
1268 fd = os.open(filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, perms)
1272 tempfile.tempdir = old_tempdir
1276 ################################################################################
# checks if the user part of the email is listed in the alias file
def is_email_alias(email):
    """Return whether the local part of 'email' appears in the per-host
    alias file.  The alias file is read once and cached in the
    module-level 'alias_cache'."""
    global alias_cache
    if alias_cache == None:
        aliasfn = which_alias_file()
        alias_cache = set()
        if aliasfn:
            # Alias file lines look like "name: target"; cache the names.
            for l in open(aliasfn):
                alias_cache.add(l.split(':')[0])
    uid = email.split('@')[0]
    return uid in alias_cache
1291 ################################################################################
1295 Cnf = apt_pkg.newConfiguration()
1296 apt_pkg.ReadConfigFileISC(Cnf,default_config)
1298 if which_conf_file() != default_config:
1299 apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1301 ################################################################################