4 # Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
6 ################################################################################
8 # This program is free software; you can redistribute it and/or modify
9 # it under the terms of the GNU General Public License as published by
10 # the Free Software Foundation; either version 2 of the License, or
11 # (at your option) any later version.
13 # This program is distributed in the hope that it will be useful,
14 # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 # GNU General Public License for more details.
18 # You should have received a copy of the GNU General Public License
19 # along with this program; if not, write to the Free Software
20 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
22 ################################################################################
24 import codecs, commands, email.Header, os, pwd, re, select, socket, shutil, \
25 sys, tempfile, traceback, stat
28 from dak_exceptions import *
30 ################################################################################
# Strip "#" comments to end-of-line.
re_comments = re.compile(r"\#.*")
# Leading "digits:" prefix of a version string (an epoch).
re_no_epoch = re.compile(r"^\d+\:")
# Trailing "-something" suffix of a version string (a revision).
re_no_revision = re.compile(r"-[^-]+$")
# A "/binary-<arch>/" path component.
re_arch_from_filename = re.compile(r"/binary-[^/]+/")
# "source (version)" -- group 1 = source, group 2 = version.
re_extract_src_version = re.compile (r"(\S+)\s*\((.*)\)")
# "<package>_<version>_<arch>.deb" or ".udeb".
re_isadeb = re.compile (r"(.+?)_(.+?)_(.+)\.u?deb$")
# Source artefacts: .orig.tar.gz / .diff.gz / .tar.gz / .dsc.
re_issource = re.compile (r"(.+)_(.+?)\.(orig\.tar\.gz|diff\.gz|tar\.gz|dsc)$")
# RFC822-style "Field: value" on one line.
re_single_line_field = re.compile(r"^(\S*)\s*:\s*(.*)")
# Continuation line of a multi-line field (starts with whitespace).
re_multi_line_field = re.compile(r"^\s(.*)")
# Only characters safe to interpolate into a shell command line.
re_taint_free = re.compile(r"^[-+~/\.\w]+$")
# "Name <email>" maintainer form -- group 1 = name, group 2 = email.
re_parse_maintainer = re.compile(r"^\s*(\S.*\S)\s*\<([^\>]+)\>")
# "uid ... <email>" line from gpg --fingerprint output.
re_gpg_uid = re.compile('^uid.*<([^>]*)>')
# "source (version)" with no trailing junk -- group 1 = source,
# group 2 = version.  (This was previously defined twice; the duplicate,
# identical definition has been removed.)
re_srchasver = re.compile(r"^(\S+)\s+\((\S+)\)$")
# A Format: version "major.minor" with an optional "(extension)".
# Note the minor-version group is NOT optional, so a bare "1" does not match.
re_verwithext = re.compile(r"^(\d+)(?:\.(\d+))(?:\s+\((\S+)\))?$")
# Fallback configuration file paths (overridable per-host, see
# which_conf_file() / which_apt_conf_file()).
default_config = "/etc/dak/dak.conf"
default_apt_config = "/etc/dak/apt.conf"

# fingerprint -> set of uid email addresses; filled lazily by
# gpg_get_key_addresses().
key_uid_email_cache = {}

# (hashname, function, earliest_changes_version)
# NOTE(review): apt_pkg is used here but its import is not visible in this
# view -- presumably imported at the top of the file.
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
                ("sha256", apt_pkg.sha256sum, (1, 8))]
62 ################################################################################
def open_file(filename, mode='r'):
    """Open `filename` in `mode` and return the file object; any IOError
    is converted to a CantOpenError carrying the filename.

    NOTE(review): the try/except/return scaffolding was missing from this
    view and has been restored to the obviously intended shape.
    """
    try:
        f = open(filename, mode)
    except IOError:
        raise CantOpenError(filename)
    return f
71 ################################################################################
def our_raw_input(prompt=""):
    """Prompt on stdout and read one line from stdin; a ^D (EOF) from the
    user aborts.

    NOTE(review): most of this function (the raw_input call and its
    try/except scaffolding) is missing from this view -- verify against
    version control before editing.
    """
        sys.stdout.write(prompt)
        sys.stderr.write("\nUser interrupt (^D).\n")
84 ################################################################################
def extract_component_from_section(section):
    """Split a Section value such as "contrib/net" into
    (section, component).

    NOTE(review): the initialisation of `component` and the fallback
    branches are missing from this view -- verify against upstream.
    """
    # An explicit "component/section" pair names its component directly.
    if section.find('/') != -1:
        component = section.split('/')[0]

    # Expand default component
    # `Cnf` is the module-wide apt_pkg configuration loaded at import time.
    if Cnf.has_key("Component::%s" % section):

    return (section, component)
101 ################################################################################
def parse_changes(filename, signing_rules=0):
    """Parse a Debian .changes/.dsc-style control file into a dict keyed
    by lower-cased field name; the raw text is kept in "filecontents".

    signing_rules is an optional argument:

      o If signing_rules == -1, no signature is required.
      o If signing_rules == 0 (the default), a signature is required.
      o If signing_rules == 1, it turns on the same strict format checking
        as for .dsc files.

    The rules for (signing_rules == 1)-mode are:

      o The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
        followed by any PGP header data and must end with a blank line.

      o The data section must end with a blank line and must be followed by
        "-----BEGIN PGP SIGNATURE-----".

    NOTE(review): many lines of this function (initialisation, loop
    counters, continue/break statements, if-guards and the final return)
    are missing from this view -- reconcile against version control
    before changing behaviour.
    """
    changes_in = open_file(filename)
    lines = changes_in.readlines()

    # Empty input is rejected outright.
    raise ParseChangesError, "[Empty changes file]"

    # Reindex by line number so we can easily verify the format of
    # the file (the `index`/`indexed_lines` setup lines are missing here).
    indexed_lines[index] = line[:-1]

    num_of_lines = len(indexed_lines.keys())
    while index < num_of_lines:
        line = indexed_lines[index]
        # Strict mode: a blank line must be followed by the signature.
        if signing_rules == 1:
            if index > num_of_lines:
                raise InvalidDscError, index
            line = indexed_lines[index]
            if not line.startswith("-----BEGIN PGP SIGNATURE"):
                raise InvalidDscError, index
        if line.startswith("-----BEGIN PGP SIGNATURE"):
        if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
            # Strict mode: skip the PGP header data up to the blank line.
            if signing_rules == 1:
                while index < num_of_lines and line != "":
                    line = indexed_lines[index]
        # If we're not inside the signed data, don't process anything
        if signing_rules >= 0 and not inside_signature:
        # "Field: value" starts a new field...
        slf = re_single_line_field.match(line)
        field = slf.groups()[0].lower()
        changes[field] = slf.groups()[1]
        # ..." ." continues a multi-line field with a blank line...
        changes[field] += '\n'
        # ...and an indented line continues the current field.
        mlf = re_multi_line_field.match(line)
        raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
        if first == 1 and changes[field] != "":
            changes[field] += '\n'
        changes[field] += mlf.groups()[0] + '\n'

    if signing_rules == 1 and inside_signature:
        raise InvalidDscError, index

    changes["filecontents"] = "".join(lines)

    if changes.has_key("source"):
        # Strip the source version in brackets from the source field,
        # put it in the "source-version" field instead.
        srcver = re_srchasver.search(changes["source"])
        changes["source"] = srcver.group(1)
        changes["source-version"] = srcver.group(2)

    raise ParseChangesError, error
212 ################################################################################
def create_hash (lfiles, key, testfn, basedict = None):
    """Compute `testfn` (e.g. apt_pkg.sha1sum) over every file in
    `lfiles` and store the result as '<key>sum' in `basedict`; returns a
    list of reject messages for files that could not be opened.

    NOTE(review): the rejmsg initialisation, `try:` line, close() and
    return are missing from this view -- verify against upstream.
    """
    for f in lfiles.keys():
        file_handle = open_file(f)
        except CantOpenError:
            rejmsg.append("Could not open file %s for checksumming" % (f))

        # Compute and record the hash for this file.
        if basedict and basedict.has_key(f):
            basedict[f]['%ssum' % key] = testfn(file_handle)
229 ################################################################################
def check_hash (where, lfiles, key, testfn, basedict = None):
    """Verify the '<key>' checksum and the recorded size of each file in
    `lfiles`, collecting human-readable reject messages; `where` names
    the source of the checksums (e.g. ".changes sha1") for the messages.

    NOTE(review): rejmsg initialisation, enclosing if/try lines, close()
    and the return are missing from this view -- verify against upstream.
    """
        # Files known to `basedict` but absent from `lfiles` have no
        # checksum entry at all.
        for f in basedict.keys():
                rejmsg.append("%s: no %s checksum" % (f, key))

    for f in lfiles.keys():
        if basedict and f not in basedict:
            rejmsg.append("%s: extraneous entry in %s checksums" % (f, key))

            file_handle = open_file(f)
        except CantOpenError:

        # Compare the freshly computed hash with the recorded one.
        if testfn(file_handle) != lfiles[f][key]:
            rejmsg.append("%s: %s check failed." % (f, key))

        # Store the hashes for later use
            basedict[f]['%ssum' % key] = lfiles[f][key]

        # Cross-check the on-disk size against the recorded size too.
        actual_size = os.stat(f)[stat.ST_SIZE]
        size = int(lfiles[f]["size"])
        if size != actual_size:
            rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
                          % (f, actual_size, size, where))
263 ################################################################################
def ensure_hashes(changes, dsc, files, dsc_files):
    """Validate (or, for pre-1.8 Format versions, compute) the
    Checksums-* fields of a .changes/.dsc pair, returning a list of
    reject messages.

    NOTE(review): many lines (rejmsg initialisation, loop headers, try
    blocks, the appends of `m`, and the return) are missing from this
    view -- verify against upstream.
    """
    # Make sure we recognise the format of the Files: field
    format = changes.get("format", "0.0").split(".",1)
        format = int(format[0]), int(format[1])
        format = int(float(format[0])), 0

        if x.startswith("checksum-"):
            h = x.split("-",1)[1]
            if h not in dict(known_hashes):
                # BUG(review): format string has no %s -- the "%" here
                # raises TypeError instead of producing a message.
                rejmsg.append("Unsupported checksum field in .changes" % (h))

        if x.startswith("checksum-"):
            h = x.split("-",1)[1]
            if h not in dict(known_hashes):
                # BUG(review): same missing %s as above.
                rejmsg.append("Unsupported checksum field in .dsc" % (h))

    fs_m = build_file_list(changes, 0)
    if "source" in changes["architecture"]:
        fs_md = build_file_list(dsc, 1)

    # We have to calculate the hash if we have an earlier changes version than
    # the hash appears in rather than require it exist in the changes file
    # I hate backwards compatibility
    for h,f,v in known_hashes:
            for m in create_hash(fs_m, h, f, files):
                fs = build_file_list(changes, 0, "checksums-%s" % h, h)
                for m in check_hash(".changes %s" % (h), fs, h, f, files):
            except NoFilesFieldError:
                rejmsg.append("No Checksums-%s: field in .changes" % (h))
            except UnknownFormatError, format:
                rejmsg.append("%s: unknown format of .changes" % (format))
            except ParseChangesError, line:
                rejmsg.append("parse error for Checksums-%s in .changes, can't grok: %s." % (h, line))

        # Binary-only uploads have no .dsc to check.
        if "source" not in changes["architecture"]: continue
            for m in create_hash(fs_md, h, f, dsc_files):
                fs = build_file_list(dsc, 1, "checksums-%s" % h, h)
                for m in check_hash(".dsc %s" % (h), fs, h, f, dsc_files):
            except UnknownFormatError, format:
                rejmsg.append("%s: unknown format of .dsc" % (format))
            except NoFilesFieldError:
                rejmsg.append("No Checksums-%s: field in .dsc" % (h))
            except ParseChangesError, line:
                rejmsg.append("parse error for Checksums-%s in .dsc, can't grok: %s." % (h, line))
330 ################################################################################
332 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
    """Parse a Files:/Checksums-* style field into a dict keyed by
    filename, each entry carrying size/section/priority/component plus
    the named hash.  Raises NoFilesFieldError, UnknownFormatError or
    ParseChangesError.

    NOTE(review): several lines (the result-dict initialisation, else
    branches, the entry-splitting try and the return) are missing from
    this view -- verify against upstream.
    """
    # Make sure we have a Files: field to parse...
    if not changes.has_key(field):
        raise NoFilesFieldError

    # Make sure we recognise the format of the Files: field
    format = re_verwithext.search(changes.get("format", "0.0"))
        raise UnknownFormatError, "%s" % (changes.get("format","0.0"))

    format = format.groups()
    if format[1] == None:
        # Bare major version: coerce via float, minor defaults to 0.
        format = int(float(format[0])), 0, format[2]
        format = int(format[0]), int(format[1]), format[2]
    if format[2] == None:

        raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
    # Only Format versions 1.5..1.8 are accepted for .changes, and
    # Checksums-* fields require at least 1.8.
    if (format < (1,5) or format > (1,8)):
        raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
    if field != "files" and format < (1,8):
        raise UnknownFormatError, "%s" % (changes.get("format","0.0"))

    # Only a .changes "Files:" entry carries section/priority columns.
    includes_section = (not is_a_dsc) and field == "files"

    # Parse each entry/line:
    for i in changes[field].split('\n'):
            section = priority = ""
                (md5, size, section, priority, name) = s
                (md5, size, name) = s
            raise ParseChangesError, i

        (section, component) = extract_component_from_section(section)

        files[name] = Dict(size=size, section=section,
                           priority=priority, component=component)
        files[name][hashname] = md5
392 ################################################################################
def force_to_utf8(s):
    """Forces a string to UTF-8.  If the string isn't already UTF-8,
    it's assumed to be ISO-8859-1.

    NOTE(review): the try/except scaffolding was missing from this view
    and has been restored to the obviously intended shape.
    """
    try:
        # If this decodes cleanly, s is already valid UTF-8.
        unicode(s, 'utf-8')
        return s
    except UnicodeError:
        latin1_s = unicode(s,'iso8859-1')
        return latin1_s.encode('utf-8')
def rfc2047_encode(s):
    """Encodes a (header) string per RFC2047 if necessary.  If the
    string is neither ASCII nor UTF-8, it's assumed to be ISO-8859-1.

    NOTE(review): the try/except/return scaffolding is missing from this
    view -- verify against upstream.
    """
        # codecs.lookup(...)[1] is the codec's decode function; used here
        # purely to test whether `s` is valid in that encoding.
        codecs.lookup('ascii')[1](s)
        codecs.lookup('utf-8')[1](s)
        # 998 is the maximum header line length used for folding.
        h = email.Header.Header(s, 'utf-8', 998)
        # Fallback: assume ISO-8859-1.
        h = email.Header.Header(s, 'iso-8859-1', 998)
420 ################################################################################
422 # <Culus> 'The standard sucks, but my tool is supposed to interoperate
423 # with it. I know - I'll fix the suckage and make things
def fix_maintainer (maintainer):
    """Parses a Maintainer or Changed-By field and returns:
    (1) an RFC822 compatible version,
    (2) an RFC2047 compatible version,
    (3) the name, and (4) the email address.

    The name is forced to UTF-8 for both (1) and (3).  If the name field
    contains '.' or ',' (as allowed by Debian policy), (1) and (2) are
    switched to 'email (name)' format.

    NOTE(review): the name/email assignments and the else branches are
    missing from this view -- verify against upstream.
    """
    maintainer = maintainer.strip()
    # Empty input: four empty strings.
        return ('', '', '', '')

    # Bare address with no name part...
    if maintainer.find("<") == -1:
    # ..."<addr>" form: strip the angle brackets...
    elif (maintainer[0] == "<" and maintainer[-1:] == ">"):
        email = maintainer[1:-1]
    # ...otherwise the full "Name <addr>" form.
        m = re_parse_maintainer.match(maintainer)
            raise ParseMaintError, "Doesn't parse as a valid Maintainer field."

    # Get an RFC2047 compliant version of the name
    rfc2047_name = rfc2047_encode(name)

    # Force the name to be UTF-8
    name = force_to_utf8(name)

    # Names containing ',' or '.' switch to "email (name)" form.
    if name.find(',') != -1 or name.find('.') != -1:
        rfc822_maint = "%s (%s)" % (email, name)
        rfc2047_maint = "%s (%s)" % (email, rfc2047_name)
        rfc822_maint = "%s <%s>" % (name, email)
        rfc2047_maint = "%s <%s>" % (rfc2047_name, email)

    # "buildd_*" pseudo-addresses are allowed to lack an '@'.
    if email.find("@") == -1 and email.find("buildd_") != 0:
        raise ParseMaintError, "No @ found in email address part."

    return (rfc822_maint, rfc2047_maint, name, email)
471 ################################################################################
473 # sendmail wrapper, takes _either_ a message string or a file as arguments
def send_mail (message, filename=""):
    """Pipe `message` (or the pre-existing file `filename`) into the
    configured sendmail command; raises SendmailFailedError on failure.

    NOTE(review): the if-guards, os.close() and the temp-file unlink are
    missing from this view -- verify against upstream.
    """
    # If we've been passed a string dump it into a temporary file
        # mktemp() alone is racy, but the O_CREAT|O_EXCL open below fails
        # rather than following a pre-created file.
        filename = tempfile.mktemp()
        fd = os.open(filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0700)
        os.write (fd, message)

    # Invoke sendmail via the shell; `filename` is interpolated into the
    # command line, so it must be shell-safe.
    (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
        raise SendmailFailedError, output

    # Clean up any temporary files
491 ################################################################################
def poolify (source, component):
    """Return the pool subdirectory for `source` within `component`:
    ("abc", "main") -> "main/a/abc/"; "lib*" packages use a
    four-character prefix directory ("libfoo" -> "main/libf/libfoo/").

    NOTE(review): the component-slash guard and the else line were
    missing from this view and have been restored to the obviously
    intended shape.
    """
    if component:
        component += '/'
    if source[:3] == "lib":
        return component + source[:4] + '/' + source + '/'
    return component + source[:1] + '/' + source + '/'
501 ################################################################################
def move (src, dest, overwrite = 0, perms = 0664):
    """Move `src` to `dest` with mode `perms`, creating the destination
    directory (mode 2775) if needed; refuses to overwrite unless
    `overwrite` is set, and calls fubar() (fatal) on conflicts.

    NOTE(review): the else branches, the umask restore, the overwrite
    guard and the final unlink of `src` are missing from this view --
    verify against upstream.
    """
    if os.path.exists(dest) and os.path.isdir(dest):
        # (the "dest is a directory: dest_dir = dest" line is missing here)
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        # Clear the umask so makedirs really creates mode 2775.
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Moving %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
            fubar("Can't move %s to %s - file already exists." % (src, dest))
            if not os.access(dest, os.W_OK):
                fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
    # copy2 preserves mtime/permission bits; chmod then enforces `perms`.
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
def copy (src, dest, overwrite = 0, perms = 0664):
    """Copy `src` to `dest` with mode `perms`, creating the destination
    directory (mode 2775) if needed.  Unlike move(), conflicts raise
    FileExistsError / CantOverwriteError rather than being fatal.

    NOTE(review): the else branches, the umask restore and the overwrite
    guard are missing from this view -- verify against upstream.
    """
    if os.path.exists(dest) and os.path.isdir(dest):
        # (the "dest is a directory: dest_dir = dest" line is missing here)
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        # Clear the umask so makedirs really creates mode 2775.
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Copying %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
            raise FileExistsError
            if not os.access(dest, os.W_OK):
                raise CantOverwriteError
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
548 ################################################################################
551 res = socket.gethostbyaddr(socket.gethostname())
552 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
553 if database_hostname:
554 return database_hostname
def which_conf_file ():
    """Return the host-specific dak config path if one is configured
    under Config::<hostname>::DakConfig, otherwise the global default.

    NOTE(review): the dropped `else:` line from this view is restored as
    a plain fallthrough return -- behaviour is unchanged.
    """
    res = socket.gethostbyaddr(socket.gethostname())
    if Cnf.get("Config::" + res[0] + "::DakConfig"):
        return Cnf["Config::" + res[0] + "::DakConfig"]
    return default_config
def which_apt_conf_file ():
    """Return the host-specific apt config path if one is configured
    under Config::<hostname>::AptConfig, otherwise the global default.

    NOTE(review): the dropped `else:` line from this view is restored as
    a plain fallthrough return -- behaviour is unchanged.
    """
    res = socket.gethostbyaddr(socket.gethostname())
    if Cnf.get("Config::" + res[0] + "::AptConfig"):
        return Cnf["Config::" + res[0] + "::AptConfig"]
    return default_apt_config
def which_alias_file():
    """Return the path of this host's forward-alias file, or None if it
    does not exist.

    NOTE(review): the return lines were missing from this view and have
    been restored to the obviously intended shape.
    """
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
    if os.path.exists(aliasfn):
        return aliasfn
    return None
580 ################################################################################
582 # Escape characters which have meaning to SQL's regex comparison operator ('~')
583 # (woefully incomplete)
586 s = s.replace('+', '\\\\+')
587 s = s.replace('.', '\\\\.')
590 ################################################################################
592 # Perform a substition of template
def TemplateSubst(map, filename):
    """Read template `filename` and replace each key of `map` with its
    value; returns the substituted text.

    NOTE(review): the loop header, close() and return are missing from
    this view -- verify against upstream.
    """
    file = open_file(filename)
    template = file.read()
        # Plain textual replacement; keys are literal markers, not regexes.
        template = template.replace(x,map[x])
601 ################################################################################
def fubar(msg, exit_code=1):
    """Print a fatal error to stderr and exit with `exit_code`.

    NOTE(review): the sys.exit() line and the `def warn` header were
    missing from this view and have been restored to the obviously
    intended shape.
    """
    sys.stderr.write("E: %s\n" % (msg))
    sys.exit(exit_code)

def warn(msg):
    """Print a non-fatal warning to stderr."""
    sys.stderr.write("W: %s\n" % (msg))
610 ################################################################################
# Returns the user name with a laughable attempt at rfc822 conformancy
# (read: removing stray periods).
def whoami ():
    """Return the current user's GECOS name (the part before the first
    comma) with any periods removed.

    NOTE(review): the `def` line was missing from this view and has been
    restored to the obviously intended shape.
    """
    return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
617 ################################################################################
627 return ("%d%s" % (c, t))
629 ################################################################################
def cc_fix_changes (changes):
    """Rewrite changes["architecture"] in place from a space-separated
    string into a dict mapping each architecture name to 1.

    NOTE(review): the `if o:` guard and the loop header were missing from
    this view and have been restored per the obviously intended shape.
    """
    o = changes.get("architecture", "")
    if o:
        del changes["architecture"]
    changes["architecture"] = {}
    for j in o.split():
        changes["architecture"][j] = 1
639 # Sort by source name, source version, 'have source', and then by filename
def changes_compare (a, b):
    """cmp()-style comparator for .changes filenames: orders by source
    name, then version, then source-carrying before binary-only, then
    plain filename.  Unparseable files sort to the appropriate end.

    NOTE(review): the try/except scaffolding, the `if q: return q`
    guards and the final fallback are missing from this view -- verify
    against upstream.
    """
        a_changes = parse_changes(a)
        b_changes = parse_changes(b)

    # Normalise the architecture fields into dicts first.
    cc_fix_changes (a_changes)
    cc_fix_changes (b_changes)

    # Sort by source name
    a_source = a_changes.get("source")
    b_source = b_changes.get("source")
    q = cmp (a_source, b_source)

    # Sort by source version
    a_version = a_changes.get("version", "0")
    b_version = b_changes.get("version", "0")
    q = apt_pkg.VersionCompare(a_version, b_version)

    # Sort by 'have source'
    a_has_source = a_changes["architecture"].get("source")
    b_has_source = b_changes["architecture"].get("source")
    if a_has_source and not b_has_source:
    elif b_has_source and not a_has_source:

    # Fall back to sort by filename
679 ################################################################################
def find_next_free (dest, too_many=100):
    """Return `dest` if it does not exist, otherwise the first
    non-existing "dest.N" variant; raise NoFreeFilenameError after
    `too_many` attempts.

    NOTE(review): the counter initialisation and the return were missing
    from this view and have been restored per the obviously intended
    shape.
    """
    extra = 0
    orig_dest = dest
    while os.path.exists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
        extra += 1
    if extra >= too_many:
        raise NoFreeFilenameError
    return dest
691 ################################################################################
def result_join (original, sep = '\t'):
    """Join the elements of `original` with `sep`, rendering None
    elements as the empty string.

    NOTE(review): the accumulator/else/return lines were missing from
    this view and have been restored; the local no longer shadows the
    `list` builtin and iteration uses range() (valid in both Python 2
    and 3) instead of index arithmetic over xrange.
    """
    parts = []
    for i in range(len(original)):
        if original[i] == None:
            parts.append("")
        else:
            parts.append(original[i])
    return sep.join(parts)
702 ################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    """Prefix each (stripped) line of `str` with `prefix`; blank lines
    are dropped unless `include_blank_lines` is set.  The result has no
    trailing newline.

    NOTE(review): the accumulator, per-line strip and return were missing
    from this view and have been restored per the obviously intended
    shape.
    """
    out = ""
    for line in str.split('\n'):
        line = line.strip()
        if line or include_blank_lines:
            out += "%s%s\n" % (prefix, line)
    # Strip trailing new line
    if out:
        out = out[:-1]
    return out
715 ################################################################################
def validate_changes_file_arg(filename, require_changes=1):
    """'filename' is either a .changes or .dak file.  If 'filename' is a
    .dak file, it's changed to be the corresponding .changes file.  The
    function then checks if the .changes file a) exists and b) is
    readable and returns the .changes filename if so.  If there's a
    problem, the next action depends on the option 'require_changes':

      o If 'require_changes' == -1, errors are ignored and the .changes
        filename is returned.
      o If 'require_changes' == 0, a warning is given and 'None' is returned.
      o If 'require_changes' == 1, a fatal error is raised.

    NOTE(review): the error initialisation and the success-path returns
    are missing from this view -- verify against upstream.
    """
    orig_filename = filename
    if filename.endswith(".dak"):
        filename = filename[:-4]+".changes"

    if not filename.endswith(".changes"):
        error = "invalid file type; not a changes file"
    if not os.access(filename,os.R_OK):
        if os.path.exists(filename):
            error = "permission denied"
            error = "file not found"

        if require_changes == 1:
            fubar("%s: %s." % (orig_filename, error))
        elif require_changes == 0:
            warn("Skipping %s - %s" % (orig_filename, error))
        else: # We only care about the .dak file
756 ################################################################################
759 return (arch != "source" and arch != "all")
761 ################################################################################
def join_with_commas_and(list):
    """Render a list as English prose: "a", "a and b", "a, b and c".
    An empty list renders as the word "nothing"."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    head = ", ".join(list[:-1])
    return head + " and " + list[-1]
768 ################################################################################
def pp_deps (deps):
    """Pretty-print a list of (package, version, constraint) dependency
    atoms as a " |"-separated alternation, e.g. "foo (>= 1.0) |bar"; an
    empty constraint prints just the package name.

    NOTE(review): the function header, loop and else lines were missing
    from this view and have been restored per the obviously intended
    shape.
    """
    pp_deps = []
    for atom in deps:
        (pkg, version, constraint) = atom
        if constraint:
            pp_dep = "%s (%s %s)" % (pkg, constraint, version)
        else:
            pp_dep = pkg
        pp_deps.append(pp_dep)
    return " |".join(pp_deps)
781 ################################################################################
786 ################################################################################
788 # Handle -a, -c and -s arguments; returns them as SQL constraints
def parse_args(Options):
    """Turn the -s/-c/-a command line options into SQL "AND ... IN (...)"
    constraint fragments; returns (con_suites, con_architectures,
    con_components, check_source).

    NOTE(review): the suite if/else scaffolding, several else branches
    and the check_source handling are missing from this view -- verify
    against upstream.
    """
    # Process suite
    for suite in split_args(Options["Suite"]):
        # database.get_suite_id() returns -1 for unknown names.
        suite_id = database.get_suite_id(suite)
            warn("suite '%s' not recognised." % (suite))
            suite_ids_list.append(suite_id)
        con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
        fubar("No valid suite given.")

    # Process component
    if Options["Component"]:
        component_ids_list = []
        for component in split_args(Options["Component"]):
            component_id = database.get_component_id(component)
            if component_id == -1:
                warn("component '%s' not recognised." % (component))
                component_ids_list.append(component_id)
        if component_ids_list:
            con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
            fubar("No valid component given.")

    # Process architecture
    con_architectures = ""
    if Options["Architecture"]:
        for architecture in split_args(Options["Architecture"]):
            # "source" is handled via check_source, not an arch id.
            if architecture == "source":
                architecture_id = database.get_architecture_id(architecture)
                if architecture_id == -1:
                    warn("architecture '%s' not recognised." % (architecture))
                arch_ids_list.append(architecture_id)
            con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
            fubar("No valid architecture given.")

    return (con_suites, con_architectures, con_components, check_source)
846 ################################################################################
848 # Inspired(tm) by Bryn Keller's print_exc_plus (See
849 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
852 tb = sys.exc_info()[2]
861 traceback.print_exc()
863 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
864 frame.f_code.co_filename,
866 for key, value in frame.f_locals.items():
867 print "\t%20s = " % key,
871 print "<unable to print>"
873 ################################################################################
875 def try_with_debug(function):
883 ################################################################################
885 # Function for use in sorting lists of architectures.
886 # Sorts normally except that 'source' dominates all others.
def arch_compare_sw (a, b):
    """cmp()-style comparator for architecture names: sorts normally
    except that "source" dominates all others.

    NOTE(review): everything after the first test is missing from this
    view -- verify against upstream.
    """
    if a == "source" and b == "source":
898 ################################################################################
900 # Split command line arguments which can be separated by either commas
901 # or whitespace. If dwim is set, it will complain about string ending
902 # in comma since this usually means someone did 'dak ls -a i386, m68k
903 # foo' or something and the inevitable confusion resulting from 'm68k'
904 # being treated as an argument is undesirable.
def split_args (s, dwim=1):
    """Split `s` on commas if it contains any, otherwise on whitespace.
    With `dwim` set, a trailing comma (a likely typo such as
    "i386, m68k ") is a fatal error.

    NOTE(review): the two return lines were missing from this view and
    have been restored per the obviously intended shape.
    """
    if s.find(",") == -1:
        return s.split()
    else:
        if s[-1:] == "," and dwim:
            fubar("split_args: found trailing comma, spurious space maybe?")
        return s.split(",")
914 ################################################################################
def Dict(**dict):
    """Build a dictionary from keyword arguments (pre-dict-literal
    convenience helper)."""
    return dict
918 ########################################
920 # Our very own version of commands.getouputstatus(), hacked to support
def gpgv_get_status_output(cmd, status_read, status_write):
    """Our own commands.getstatusoutput(), hacked to support gpgv's
    status fd: run `cmd` under /bin/sh, multiplex its stdout/stderr and
    the status pipe with select(), and return
    (output, status, exit_status).

    NOTE(review): the fork, the child's stdio setup, the accumulators
    and several close() calls are missing from this view -- verify
    against upstream.
    """
    cmd = ['/bin/sh', '-c', cmd]
    p2cread, p2cwrite = os.pipe()
    c2pread, c2pwrite = os.pipe()
    errout, errin = os.pipe()
        # Child: close every inherited fd except the status pipe before exec.
        for i in range(3, 256):
            if i != status_write:
            os.execvp(cmd[0], cmd)

    # Parent: wire up our read ends of the child's output pipes.
    os.dup2(c2pread, c2pwrite)
    os.dup2(errout, errin)

        i, o, e = select.select([c2pwrite, errin, status_read], [], [])
            r = os.read(fd, 8196)
                if fd == c2pwrite or fd == errin:
                    # (accumulate into `output`; line missing)
                elif fd == status_read:
                    # (accumulate into `status`; line missing)
                    fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
            # All pipes drained: reap the child and close everything.
            pid, exit_status = os.waitpid(pid, 0)
                os.close(status_write)
                os.close(status_read)

    return output, status, exit_status
982 ################################################################################
def process_gpgv_output(status):
    """Parse gpgv --status-fd output into ({keyword: args},
    internal_error_string); `internal_error` collects complaints about
    malformed lines.

    NOTE(review): the initialisation, continue statements and the args
    slice are missing from this view -- verify against upstream.
    """
    # Process the status-fd output
    for line in status.split('\n'):
            # Each line must look like "[GNUPG:] KEYWORD args...".
            internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
        # These particular status tokens may legitimately repeat.
        if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
            keywords[keyword] = args

    return (keywords, internal_error)
1009 ################################################################################
def retrieve_key (filename, keyserver=None, keyring=None):
    """Retrieve the key that signed 'filename' from 'keyserver' and
    add it to 'keyring'.  Returns nothing on success, or an error message
    string on failure.

    NOTE(review): the default-argument guards and the success-path
    return are missing from this view -- verify against upstream.
    """
    # Defaults for keyserver and keyring
        keyserver = Cnf["Dinstall::KeyServer"]
        keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(filename):
        return "%s: tainted filename" % (filename)

    # Invoke gpgv on the file; /dev/null keyring guarantees a NO_PUBKEY
    # status carrying the signer's fingerprint.
    status_read, status_write = os.pipe();
    cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
    (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)
        return internal_error

    if not keywords.has_key("NO_PUBKEY"):
        return "didn't find expected NO_PUBKEY in gpgv status-fd output"

    fingerprint = keywords["NO_PUBKEY"][0]
    # XXX - gpg sucks.  You can't use --secret-keyring=/dev/null as
    # it'll try to create a lockfile in /dev.  A better solution might
    # be a tempfile or something.
    cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
          % (Cnf["Dinstall::SigningKeyring"])
    cmd += " --keyring %s --keyserver %s --recv-key %s" \
           % (keyring, keyserver, fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
        return "'%s' failed with exit code %s" % (cmd, result)
1053 ################################################################################
def gpg_keyring_args(keyrings=None):
    """Return "--keyring <path>" gpg arguments for each entry of
    `keyrings`, defaulting to the configured Dinstall::GPGKeyring list.

    NOTE(review): the `if not keyrings:` guard was missing from this
    view and has been restored per the obviously intended shape.
    """
    if not keyrings:
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
    return " ".join(["--keyring %s" % x for x in keyrings])
1061 ################################################################################
def check_signature (sig_filename, reject, data_filename="", keyrings=None, autofetch=None):
    """Check the signature of a file and return the fingerprint if the
    signature is valid or 'None' if it's not.  The first argument is the
    filename whose signature should be checked.  The second argument is a
    reject function and is called when an error is found.  The reject()
    function must allow for two arguments: the first is the error message,
    the second is an optional prefix string.  It's possible for reject()
    to be called more than once during an invocation of check_signature().
    The third argument is optional and is the name of the files the
    detached signature applies to.  The fourth argument is optional and is
    a *list* of keyrings to use.  'autofetch' can either be None, True or
    False.  If None, the default behaviour specified in the config will be
    used.

    NOTE(review): many lines (early returns, bad-result bookkeeping,
    several guards and loops over `args`) are missing from this view --
    verify against upstream.
    """
    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(sig_filename):
        reject("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))

    if data_filename and not re_taint_free.match(data_filename):
        reject("!!WARNING!! tainted data filename: '%s'." % (data_filename))

        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    # Autofetch the signing key if that's enabled
    if autofetch == None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
        error_msg = retrieve_key(sig_filename)

    # Build the command line
    status_read, status_write = os.pipe();
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
        reject("internal error while performing signature check on %s." % (sig_filename))
        reject(internal_error, "")
        reject("Please report the above errors to the Archive maintainers by replying to this mail.", "")

    # Now check for obviously bad things in the processed output
    if keywords.has_key("KEYREVOKED"):
        reject("The key used to sign %s has been revoked." % (sig_filename))
    if keywords.has_key("BADSIG"):
        reject("bad signature on %s." % (sig_filename))
    if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
        reject("failed to check signature on %s." % (sig_filename))
    if keywords.has_key("NO_PUBKEY"):
        args = keywords["NO_PUBKEY"]
        reject("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if keywords.has_key("BADARMOR"):
        reject("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if keywords.has_key("NODATA"):
        reject("no signature found in %s." % (sig_filename))
    if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
        args = keywords["KEYEXPIRED"]
        reject("The key (0x%s) used to sign %s has expired." % (key, sig_filename))

    # Next check gpgv exited with a zero return code
        reject("gpgv failed while checking %s." % (sig_filename))
        reject(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
        reject(prefix_multi_line_string(output, " [GPG output:] "), "")

    # Sanity check the good stuff we expect
    if not keywords.has_key("VALIDSIG"):
        reject("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
    args = keywords["VALIDSIG"]
        reject("internal error while checking signature on %s." % (sig_filename))
    fingerprint = args[0]
    if not keywords.has_key("GOODSIG"):
        reject("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if not keywords.has_key("SIG_ID"):
        reject("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

    # Finally ensure there's not something we don't recognise
    known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
                          NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")

    for keyword in keywords.keys():
        if not known_keywords.has_key(keyword):
            reject("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
1191 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """Retrieve email addresses from gpg key uids for a given
    fingerprint, memoised in the module-level key_uid_email_cache.

    NOTE(review): the cache-hit return, the set initialisation and the
    final return are missing from this view -- verify against upstream.
    """
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses != None:
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
          % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    # Pull the email out of every "uid ... <email>" line.
    for l in output.split('\n'):
        m = re_gpg_uid.match(l)
            addresses.add(m.group(1))
    key_uid_email_cache[fingerprint] = addresses
1210 ################################################################################
1212 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """Greedy word-wrap of `paragraph` to `max_length` columns, with
    `prefix` prepended to continuation lines; a single word longer than
    `max_length` gets a line of its own.

    NOTE(review): the accumulator initialisation, several if/else
    branches and the return are missing from this view -- verify against
    upstream.
    """
    words = paragraph.split()
        word_size = len(word)
        # An over-long word: flush the current line, emit the word alone.
        if word_size > max_length:
                s += line + '\n' + prefix
            s += word + '\n' + prefix
            # +1 accounts for the joining space.
            new_length = len(line) + word_size + 1
            if new_length > max_length:
                s += line + '\n' + prefix
1243 ################################################################################
1245 # Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
1246 # Returns fixed 'src'
def clean_symlink (src, dest, root):
    """Relativize the absolute symlink `src` -> `dest` with respect to
    `root` and return the fixed source path (a chain of "../" segments
    climbing out of dest's directory, followed by the stripped src)."""
    stripped_src = src.replace(root, '', 1)
    link_dir = os.path.dirname(dest.replace(root, '', 1))
    depth = len(link_dir.split('/'))
    return ('../' * depth) + stripped_src
1254 ################################################################################
def temp_filename(directory=None, dotprefix=None, perms=0700):
    """Return a secure and unique filename by pre-creating it.
    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'dotprefix' is non-null, the filename will be prefixed with a '.'.

    NOTE(review): the if-guards, close() and the return are missing from
    this view -- verify against upstream.
    """
        # Point the tempfile module at the requested directory.
        old_tempdir = tempfile.tempdir
        tempfile.tempdir = directory

    filename = tempfile.mktemp()

        filename = "%s/.%s" % (os.path.dirname(filename), os.path.basename(filename))
    # Pre-create with O_EXCL so the mktemp() name cannot be raced.
    fd = os.open(filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, perms)

        # Restore the module-global tempdir we changed above.
        tempfile.tempdir = old_tempdir
1277 ################################################################################
1279 # checks if the user part of the email is listed in the alias file
def is_email_alias(email):
    """Check whether the local part of `email` is listed in this host's
    forward-alias file; results are cached in the module-level
    `alias_cache`.

    NOTE(review): the cache initialisation and surrounding guards are
    missing from this view -- verify against upstream.
    """
    if alias_cache == None:
        aliasfn = which_alias_file()
            # Alias lines are "name: target"; cache the name part only.
            for l in open(aliasfn):
                alias_cache.add(l.split(':')[0])
    uid = email.split('@')[0]
    return uid in alias_cache
1292 ################################################################################
# Module initialisation: load the dak configuration into the global `Cnf`
# used throughout this file.
# NOTE(review): the apt_pkg.init()/alias_cache setup lines appear to be
# missing from this view, and the apt_pkg import itself is not visible.
Cnf = apt_pkg.newConfiguration()
apt_pkg.ReadConfigFileISC(Cnf,default_config)

# Layer any host-specific configuration on top of the defaults.
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1302 ################################################################################