4 # Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
6 ################################################################################
8 # This program is free software; you can redistribute it and/or modify
9 # it under the terms of the GNU General Public License as published by
10 # the Free Software Foundation; either version 2 of the License, or
11 # (at your option) any later version.
13 # This program is distributed in the hope that it will be useful,
14 # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 # GNU General Public License for more details.
18 # You should have received a copy of the GNU General Public License
19 # along with this program; if not, write to the Free Software
20 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
22 ################################################################################
24 import codecs, commands, email.Header, os, pwd, re, select, socket, shutil, \
25 sys, tempfile, traceback, stat
28 from dak_exceptions import *
30 ################################################################################
# Pre-compiled regular expressions used throughout this module.
re_comments = re.compile(r"\#.*")
re_no_epoch = re.compile(r"^\d+\:")
re_no_revision = re.compile(r"-[^-]+$")
re_arch_from_filename = re.compile(r"/binary-[^/]+/")
re_extract_src_version = re.compile (r"(\S+)\s*\((.*)\)")
re_isadeb = re.compile (r"(.+?)_(.+?)_(.+)\.u?deb$")
re_issource = re.compile (r"(.+)_(.+?)\.(orig\.tar\.gz|diff\.gz|tar\.gz|dsc)$")

# RFC822-ish control file fields.
re_single_line_field = re.compile(r"^(\S*)\s*:\s*(.*)")
re_multi_line_field = re.compile(r"^\s(.*)")
# Filenames safe to interpolate into shell command lines.
re_taint_free = re.compile(r"^[-+~/\.\w]+$")

re_parse_maintainer = re.compile(r"^\s*(\S.*\S)\s*\<([^\>]+)\>")
re_gpg_uid = re.compile('^uid.*<([^>]*)>')

# "source (version)" as found in e.g. a Source: field.
# NOTE(review): this pattern was defined twice with byte-identical text;
# the redundant duplicate has been dropped.
re_srchasver = re.compile(r"^(\S+)\s+\((\S+)\)$")
re_verwithext = re.compile(r"^(\d+)(?:\.(\d+))(?:\s+\((\S+)\))?$")

default_config = "/etc/dak/dak.conf"
default_apt_config = "/etc/dak/apt.conf"

# Cache of fingerprint -> set of uid email addresses
# (filled lazily by gpg_get_key_addresses).
key_uid_email_cache = {}

# (hashname, function, earliest_changes_version)
# NOTE(review): apt_pkg is assumed to be imported near the top of this file
# (the import line is not visible in this listing) -- confirm.
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
                ("sha256", apt_pkg.sha256sum, (1, 8))]
62 ################################################################################
def open_file(filename, mode='r'):
    """Open `filename` in `mode` and return the file object, raising
    CantOpenError (instead of a bare IOError) on failure."""
    # NOTE(review): the listing had lost the try/except and the return;
    # without them the function raised unconditionally.
    try:
        f = open(filename, mode)
    except IOError:
        raise CantOpenError(filename)
    return f
71 ################################################################################
def our_raw_input(prompt=""):
    """Like raw_input(), but write the prompt explicitly and exit cleanly
    on end-of-file (^D) instead of raising EOFError."""
    if prompt:
        sys.stdout.write(prompt)
    sys.stdout.flush()
    try:
        ret = raw_input()
        return ret
    except EOFError:
        sys.stderr.write("\nUser interrupt (^D).\n")
        raise SystemExit
84 ################################################################################
def extract_component_from_section(section):
    """Split a Section field value into (section, component).  A section of
    the form 'foo/bar' lives in component 'foo'; otherwise the component is
    looked up in the Component:: configuration tree, defaulting to main."""
    component = ""

    if section.find('/') != -1:
        component = section.split('/')[0]

    # Expand default component
    if component == "":
        # NOTE(review): reconstructed from a truncated listing -- confirm
        # against upstream dak that a bare section listed under Component::
        # maps to itself, and anything else defaults to "main".
        if Cnf.has_key("Component::%s" % section):
            component = section
        else:
            component = "main"

    return (section, component)
101 ################################################################################
# NOTE(review): garbled listing -- original file line numbers are embedded in
# every line and many interior lines (loop setup, try/except, index updates)
# are missing, so this block is not runnable as-is.  Left byte-identical;
# recover the missing lines from upstream dak before editing.
# Purpose (per the visible docstring): parse an RFC822-style .changes/.dsc
# file into a field->value dict, optionally enforcing the inline PGP
# signature layout selected by signing_rules.
103 def parse_changes(filename, signing_rules=0):
104 """Parses a changes file and returns a dictionary where each field is a
105 key.  The mandatory first argument is the filename of the .changes
108 signing_rules is an optional argument:
110 o If signing_rules == -1, no signature is required.
111 o If signing_rules == 0 (the default), a signature is required.
112 o If signing_rules == 1, it turns on the same strict format checking
115 The rules for (signing_rules == 1)-mode are:
117 o The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
118 followed by any PGP header data and must end with a blank line.
120 o The data section must end with a blank line and must be followed by
121 "-----BEGIN PGP SIGNATURE-----".
127 changes_in = open_file(filename)
128 lines = changes_in.readlines()
131 raise ParseChangesError, "[Empty changes file]"
133 # Reindex by line number so we can easily verify the format of
139 indexed_lines[index] = line[:-1]
143 num_of_lines = len(indexed_lines.keys())
146 while index < num_of_lines:
148 line = indexed_lines[index]
150 if signing_rules == 1:
152 if index > num_of_lines:
153 raise InvalidDscError, index
154 line = indexed_lines[index]
155 if not line.startswith("-----BEGIN PGP SIGNATURE"):
156 raise InvalidDscError, index
161 if line.startswith("-----BEGIN PGP SIGNATURE"):
163 if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
165 if signing_rules == 1:
166 while index < num_of_lines and line != "":
168 line = indexed_lines[index]
170 # If we're not inside the signed data, don't process anything
171 if signing_rules >= 0 and not inside_signature:
173 slf = re_single_line_field.match(line)
175 field = slf.groups()[0].lower()
176 changes[field] = slf.groups()[1]
180 changes[field] += '\n'
182 mlf = re_multi_line_field.match(line)
185 raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
186 if first == 1 and changes[field] != "":
187 changes[field] += '\n'
189 changes[field] += mlf.groups()[0] + '\n'
193 if signing_rules == 1 and inside_signature:
194 raise InvalidDscError, index
197 changes["filecontents"] = "".join(lines)
199 if changes.has_key("source"):
200 # Strip the source version in brackets from the source field,
201 # put it in the "source-version" field instead.
202 srcver = re_srchasver.search(changes["source"])
204 changes["source"] = srcver.group(1)
205 changes["source-version"] = srcver.group(2)
208 raise ParseChangesError, error
212 ################################################################################
# NOTE(review): garbled listing -- embedded line numbers, and the "rejmsg"
# initialisation, try:, and return are missing.  Left byte-identical.
# Purpose: compute the `key` checksum of each file in lfiles via testfn and
# store it into basedict; presumably returns a list of reject messages --
# confirm against upstream.
214 def create_hash (lfiles, key, testfn, basedict = None):
216 for f in lfiles.keys():
218 file_handle = open_file(f)
219 except CantOpenError:
220 rejmsg.append("Could not open file %s for checksumming" % (f))
223 if basedict and basedict.has_key(f):
224 basedict[f]['%ssum' % key] = testfn(file_handle)
229 ################################################################################
# NOTE(review): garbled listing -- embedded line numbers and missing interior
# lines (rejmsg init, several try/except/continue lines, return).  Left
# byte-identical.  Purpose: verify the `key` checksum and size of each file
# in lfiles against the recorded values, appending human-readable reject
# messages on mismatch.
231 def check_hash (where, lfiles, key, testfn, basedict = None):
234 for f in basedict.keys():
236 rejmsg.append("%s: no %s checksum" % (f, key))
238 for f in lfiles.keys():
239 if basedict and f not in basedict:
240 rejmsg.append("%s: extraneous entry in %s checksums" % (f, key))
243 file_handle = open_file(f)
244 except CantOpenError:
248 if testfn(file_handle) != lfiles[f][key]:
249 rejmsg.append("%s: %s check failed." % (f, key))
251 # Store the hashes for later use
253 basedict[f]['%ssum' % key] = lfiles[f][key]
255 actual_size = os.stat(f)[stat.ST_SIZE]
256 size = int(lfiles[f]["size"])
257 if size != actual_size:
258 rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
259 % (f, actual_size, size, where))
263 ################################################################################
# NOTE(review): garbled listing -- embedded line numbers and missing interior
# lines (try/except around the format parse, the loops' try: lines, return).
# Left byte-identical.  Purpose: make sure every supported checksum field in
# the .changes/.dsc is recognised and verified, computing hashes locally for
# changes-format versions that predate the field.
265 def ensure_hashes(changes, dsc, files, dsc_files):
266 # Make sure we recognise the format of the Files: field
267 format = changes.get("format", "0.0").split(".",1)
269 format = int(format[0]), int(format[1])
271 format = int(float(format[0])), 0
275 if x.startswith("checksum-"):
276 h = x.split("-",1)[1]
277 if h not in dict(known_hashes):
# NOTE(review): BUG -- the format string below has no %s placeholder, so the
# "% (h)" will raise TypeError if this path is ever taken.  Same a few lines
# further down for the .dsc variant.  Fix when the block is recovered.
278 rejmsg.append("Unsupported checksum field in .changes" % (h))
281 if x.startswith("checksum-"):
282 h = x.split("-",1)[1]
283 if h not in dict(known_hashes):
284 rejmsg.append("Unsupported checksum field in .dsc" % (h))
286 # We have to calculate the hash if we have an earlier changes version than
287 # the hash appears in rather than require it exist in the changes file
288 # I hate backwards compatibility
289 for h,f,v in known_hashes:
292 for m in create_hash(files, h, f, files):
295 for m in check_hash(".changes %s" % (h), files, h, f, files):
297 except NoFilesFieldError:
298 rejmsg.append("No Checksums-%s: field in .changes" % (h))
299 except UnknownFormatError, format:
300 rejmsg.append("%s: unknown format of .changes" % (format))
301 except ParseChangesError, line:
302 rejmsg.append("parse error for Checksums-%s in .changes, can't grok: %s." % (h, line))
304 if "source" not in changes["architecture"]: continue
308 for m in create_hash(dsc_files, h, f, dsc_files):
311 for m in check_hash(".dsc %s" % (h), dsc_files, h, f, dsc_files):
313 except UnknownFormatError, format:
314 rejmsg.append("%s: unknown format of .dsc" % (format))
315 except NoFilesFieldError:
316 rejmsg.append("No Checksums-%s: field in .dsc" % (h))
317 except ParseChangesError, line:
318 rejmsg.append("parse error for Checksums-%s in .dsc, can't grok: %s." % (h, line))
322 ################################################################################
# NOTE(review): garbled listing -- embedded line numbers and missing interior
# lines (files dict init, format sanity branches, per-line split, return).
# Left byte-identical.  Purpose: parse a Files:/Checksums-*: field into a
# dict keyed by filename with size/section/priority/component and the named
# hash, after validating the changes Format: version.
324 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
326 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
329 # Make sure we have a Files: field to parse...
330 if not changes.has_key(field):
331 raise NoFilesFieldError
333 # Make sure we recognise the format of the Files: field
334 format = re_verwithext.search(changes.get("format", "0.0"))
336 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
338 format = format.groups()
339 if format[1] == None:
340 format = int(float(format[0])), 0, format[2]
342 format = int(format[0]), int(format[1]), format[2]
343 if format[2] == None:
348 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
350 if (format < (1,5) or format > (1,8)):
351 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
352 if field != "files" and format < (1,8):
353 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
355 includes_section = (not is_a_dsc) and field == "files"
357 # Parse each entry/line:
358 for i in changes[field].split('\n'):
362 section = priority = ""
365 (md5, size, section, priority, name) = s
367 (md5, size, name) = s
369 raise ParseChangesError, i
376 (section, component) = extract_component_from_section(section)
378 files[name] = Dict(size=size, section=section,
379 priority=priority, component=component)
380 files[name][hashname] = md5
384 ################################################################################
def force_to_utf8(s):
    """Forces a string to UTF-8. If the string isn't already UTF-8,
    it's assumed to be ISO-8859-1."""
    # NOTE(review): the listing had lost the try/except, leaving the
    # latin-1 path unconditional; restored per upstream dak.
    try:
        unicode(s, 'utf-8')
        return s
    except UnicodeError:
        latin1_s = unicode(s, 'iso8859-1')
        return latin1_s.encode('utf-8')
def rfc2047_encode(s):
    """Encodes a (header) string per RFC2047 if necessary. If the
    string is neither ASCII nor UTF-8, it's assumed to be ISO-8859-1."""
    # NOTE(review): try/except structure reconstructed from a truncated
    # listing -- confirm against upstream dak.
    try:
        # Plain ASCII needs no encoding at all.
        codecs.lookup('ascii')[1](s)
        return s
    except UnicodeError:
        pass
    try:
        # Valid UTF-8: let email.Header encode it as such.
        codecs.lookup('utf-8')[1](s)
        h = email.Header.Header(s, 'utf-8', 998)
        return str(h)
    except UnicodeError:
        # Fall back to ISO-8859-1.
        h = email.Header.Header(s, 'iso-8859-1', 998)
        return str(h)
412 ################################################################################
414 # <Culus> 'The standard sucks, but my tool is supposed to interoperate
415 # with it. I know - I'll fix the suckage and make things
def fix_maintainer (maintainer):
    """Parses a Maintainer or Changed-By field and returns:
      (1) an RFC822 compatible version,
      (2) an RFC2047 compatible version,
      (3) the name,
      (4) the email.

    The name is forced to UTF-8 for both (1) and (3). If the name field
    contains '.' or ',' (as allowed by Debian policy), (1) and (2) are
    switched to 'email (name)' format."""
    # NOTE(review): several interior lines were reconstructed from a
    # truncated listing -- confirm the branch bodies against upstream dak.
    maintainer = maintainer.strip()
    if not maintainer:
        return ('', '', '', '')

    if maintainer.find("<") == -1:
        # Bare address with no name part.
        email = maintainer
        name = ""
    elif (maintainer[0] == "<" and maintainer[-1:] == ">"):
        # "<address>" only.
        email = maintainer[1:-1]
        name = ""
    else:
        m = re_parse_maintainer.match(maintainer)
        if not m:
            raise ParseMaintError("Doesn't parse as a valid Maintainer field.")
        name = m.group(1)
        email = m.group(2)

    # Get an RFC2047 compliant version of the name
    rfc2047_name = rfc2047_encode(name)

    # Force the name to be UTF-8
    name = force_to_utf8(name)

    if name.find(',') != -1 or name.find('.') != -1:
        # Unquoted '.'/',' are not RFC822-safe in the name part, so fall
        # back to the "email (name)" form.
        rfc822_maint = "%s (%s)" % (email, name)
        rfc2047_maint = "%s (%s)" % (email, rfc2047_name)
    else:
        rfc822_maint = "%s <%s>" % (name, email)
        rfc2047_maint = "%s <%s>" % (rfc2047_name, email)

    if email.find("@") == -1 and email.find("buildd_") != 0:
        raise ParseMaintError("No @ found in email address part.")

    return (rfc822_maint, rfc2047_maint, name, email)
463 ################################################################################
# NOTE(review): garbled listing -- embedded line numbers and missing interior
# lines (the "if message:" guards, os.close, result check, os.unlink).  Left
# byte-identical.  Purpose: pipe either the given message string (via a temp
# file) or an existing file into the configured sendmail command.
465 # sendmail wrapper, takes _either_ a message string or a file as arguments
466 def send_mail (message, filename=""):
467 # If we've been passed a string dump it into a temporary file
# NOTE(review): tempfile.mktemp() is race-prone; the immediate os.open with
# O_CREAT|O_EXCL mitigates it, but tempfile.mkstemp() would be cleaner.
469 filename = tempfile.mktemp()
470 fd = os.open(filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0700)
471 os.write (fd, message)
475 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
477 raise SendmailFailedError, output
479 # Clean up any temporary files
483 ################################################################################
def poolify (source, component):
    """Return the pool subdirectory for `source` within `component`,
    e.g. ("libfoo", "main") -> "main/libf/libfoo/" and
    ("bar", "") -> "b/bar/"."""
    # NOTE(review): the component-slash handling was lost from the listing;
    # restored per upstream dak (a non-empty component gets a trailing '/').
    if component:
        component += '/'
    if source[:3] == "lib":
        # Library packages pool under their first four characters ("libf").
        return component + source[:4] + '/' + source + '/'
    else:
        return component + source[:1] + '/' + source + '/'
493 ################################################################################
# NOTE(review): garbled listing -- embedded line numbers and missing interior
# lines (the dest-dir branch bodies, umask restore, the overwrite guard's
# "if not overwrite:" line, and the final os.unlink(src) that makes this a
# move rather than a copy).  Left byte-identical; recover from upstream.
495 def move (src, dest, overwrite = 0, perms = 0664):
496 if os.path.exists(dest) and os.path.isdir(dest):
499 dest_dir = os.path.dirname(dest)
500 if not os.path.exists(dest_dir):
501 umask = os.umask(00000)
502 os.makedirs(dest_dir, 02775)
504 #print "Moving %s to %s..." % (src, dest)
505 if os.path.exists(dest) and os.path.isdir(dest):
506 dest += '/' + os.path.basename(src)
507 # Don't overwrite unless forced to
508 if os.path.exists(dest):
510 fubar("Can't move %s to %s - file already exists." % (src, dest))
512 if not os.access(dest, os.W_OK):
513 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
514 shutil.copy2(src, dest)
515 os.chmod(dest, perms)
# NOTE(review): garbled listing -- same missing interior lines as move()
# above (branch bodies, umask restore, "if not overwrite:" guard).  Unlike
# move(), failure raises FileExistsError/CantOverwriteError instead of
# calling fubar().  Left byte-identical.
518 def copy (src, dest, overwrite = 0, perms = 0664):
519 if os.path.exists(dest) and os.path.isdir(dest):
522 dest_dir = os.path.dirname(dest)
523 if not os.path.exists(dest_dir):
524 umask = os.umask(00000)
525 os.makedirs(dest_dir, 02775)
527 #print "Copying %s to %s..." % (src, dest)
528 if os.path.exists(dest) and os.path.isdir(dest):
529 dest += '/' + os.path.basename(src)
530 # Don't overwrite unless forced to
531 if os.path.exists(dest):
533 raise FileExistsError
535 if not os.access(dest, os.W_OK):
536 raise CantOverwriteError
537 shutil.copy2(src, dest)
538 os.chmod(dest, perms)
540 ################################################################################
def where_am_i ():
    """Return this host's canonical name, preferring a per-host
    Config::<host>::DatabaseHostname override when configured."""
    # NOTE(review): the "def" line was lost from this listing; the body
    # matches dak's where_am_i() -- confirm the name against upstream.
    res = socket.gethostbyaddr(socket.gethostname())
    database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
    if database_hostname:
        return database_hostname
    else:
        return res[0]
def which_conf_file ():
    """Return the per-host dak config file if one is configured for this
    host, otherwise the global default_config."""
    res = socket.gethostbyaddr(socket.gethostname())
    if Cnf.get("Config::" + res[0] + "::DakConfig"):
        return Cnf["Config::" + res[0] + "::DakConfig"]
    else:
        return default_config
def which_apt_conf_file ():
    """Return the per-host apt config file if one is configured for this
    host, otherwise the global default_apt_config."""
    res = socket.gethostbyaddr(socket.gethostname())
    if Cnf.get("Config::" + res[0] + "::AptConfig"):
        return Cnf["Config::" + res[0] + "::AptConfig"]
    else:
        return default_apt_config
def which_alias_file():
    """Return the path of this host's forward-alias file, or None if it
    does not exist."""
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    aliasfn = '/var/lib/misc/' + hostname + '/forward-alias'
    # NOTE(review): the two return lines were lost from the listing;
    # restored per upstream dak.
    if os.path.exists(aliasfn):
        return aliasfn
    else:
        return None
572 ################################################################################
# Escape characters which have meaning to SQL's regex comparison operator ('~')
# (woefully incomplete)
def regex_safe (s):
    """Escape '+' and '.' in `s` for use with SQL's '~' regex operator.
    NOTE(review): the original comment already flags this as woefully
    incomplete -- other regex metacharacters are left untouched.  The
    def/return lines were lost from the listing and have been restored;
    confirm the function name against upstream dak."""
    s = s.replace('+', '\\\\+')
    s = s.replace('.', '\\\\.')
    return s
582 ################################################################################
# Perform a substitution of template
def TemplateSubst(map, filename):
    """Read the template `filename` and replace every occurrence of each
    key of `map` with its value; return the substituted text."""
    file = open_file(filename)
    template = file.read()
    # NOTE(review): the loop header and trailing close/return were lost
    # from the listing; restored per upstream dak.
    for x in map.keys():
        template = template.replace(x, map[x])
    file.close()
    return template
593 ################################################################################
def fubar(msg, exit_code=1):
    """Print an error to stderr and exit with `exit_code`."""
    sys.stderr.write("E: %s\n" % (msg))
    # NOTE(review): the exit call and warn()'s def line were lost from the
    # listing; restored per upstream dak.
    sys.exit(exit_code)

def warn(msg):
    """Print a non-fatal warning to stderr."""
    sys.stderr.write("W: %s\n" % (msg))
602 ################################################################################
# Returns the user name with a laughable attempt at rfc822 conformancy
# (read: removing stray periods).
def whoami ():
    # NOTE(review): the "def" line was lost from this listing; name taken
    # from upstream dak -- confirm.  Reads the GECOS full-name field of the
    # current uid, keeps the part before the first comma, strips '.'.
    return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
609 ################################################################################
619 return ("%d%s" % (c, t))
621 ################################################################################
def cc_fix_changes (changes):
    """Rewrite changes["architecture"] from a space-separated string into a
    dict mapping each architecture name to 1 (cheap membership tests)."""
    o = changes.get("architecture", "")
    # NOTE(review): the guard and loop header were lost from the listing;
    # restored per upstream dak.
    if o != "":
        del changes["architecture"]
    changes["architecture"] = {}
    for j in o.split():
        changes["architecture"][j] = 1
# NOTE(review): garbled listing -- embedded line numbers and missing interior
# lines (try/except around each parse_changes call, the early "if q:" returns
# after each comparison stage, and the final filename cmp).  Left
# byte-identical.  Purpose: a cmp()-style comparator over two .changes
# filenames, ordering by source, then version, then presence of source,
# then filename.
631 # Sort by source name, source version, 'have source', and then by filename
632 def changes_compare (a, b):
634 a_changes = parse_changes(a)
639 b_changes = parse_changes(b)
643 cc_fix_changes (a_changes)
644 cc_fix_changes (b_changes)
646 # Sort by source name
647 a_source = a_changes.get("source")
648 b_source = b_changes.get("source")
649 q = cmp (a_source, b_source)
653 # Sort by source version
654 a_version = a_changes.get("version", "0")
655 b_version = b_changes.get("version", "0")
656 q = apt_pkg.VersionCompare(a_version, b_version)
660 # Sort by 'have source'
661 a_has_source = a_changes["architecture"].get("source")
662 b_has_source = b_changes["architecture"].get("source")
663 if a_has_source and not b_has_source:
665 elif b_has_source and not a_has_source:
668 # Fall back to sort by filename
671 ################################################################################
def find_next_free (dest, too_many=100):
    """Return `dest` if it does not exist, otherwise the first free name of
    the form dest.N (N counting up from 0).  Raises NoFreeFilenameError
    after `too_many` attempts."""
    # NOTE(review): the counter initialisation, increment and final return
    # were lost from the listing; restored per upstream dak.
    extra = 0
    orig_dest = dest
    while os.path.exists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
        extra += 1
    if extra >= too_many:
        raise NoFreeFilenameError
    return dest
683 ################################################################################
def result_join (original, sep = '\t'):
    """Join the elements of `original` with `sep`, rendering None entries
    as the empty string (handy for tab-separated query output)."""
    # NOTE(review): rebuilt from a truncated listing.  Also avoids the
    # original's shadowing of the builtin `list` and its `== None` test.
    joinable = []
    for entry in original:
        if entry is None:
            joinable.append("")
        else:
            joinable.append(entry)
    return sep.join(joinable)
694 ################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    """Split `str` on newlines, strip each line, prepend `prefix` to every
    non-blank line (and blank ones too if include_blank_lines) and rejoin
    without a trailing newline."""
    # NOTE(review): the accumulator init, per-line strip and trailing-newline
    # removal were lost from the listing; restored per upstream dak.
    out = ""
    for line in str.split('\n'):
        line = line.strip()
        if line or include_blank_lines:
            out += "%s%s\n" % (prefix, line)
    # Strip trailing new line
    if out:
        out = out[:-1]
    return out
def validate_changes_file_arg(filename, require_changes=1):
    """'filename' is either a .changes or .dak file. If 'filename' is a
    .dak file, it's changed to be the corresponding .changes file. The
    function then checks if the .changes file a) exists and b) is
    readable and returns the .changes filename if so. If there's a
    problem, the next action depends on the option 'require_changes'
    argument:

      o If 'require_changes' == -1, errors are ignored and the .changes
        filename is returned.
      o If 'require_changes' == 0, a warning is given and 'None' is returned.
      o If 'require_changes' == 1, a fatal error is raised.
    """
    # NOTE(review): control flow reconstructed from a truncated listing to
    # match the docstring above -- confirm against upstream dak.
    error = None

    orig_filename = filename
    if filename.endswith(".dak"):
        filename = filename[:-4] + ".changes"

    if not filename.endswith(".changes"):
        error = "invalid file type; not a changes file"
    else:
        if not os.access(filename, os.R_OK):
            if os.path.exists(filename):
                error = "permission denied"
            else:
                error = "file not found"

    if error:
        if require_changes == 1:
            fubar("%s: %s." % (orig_filename, error))
        elif require_changes == 0:
            warn("Skipping %s - %s" % (orig_filename, error))
            return None
        else: # We only care about the .dak file
            return filename
    else:
        return filename
748 ################################################################################
751 return (arch != "source" and arch != "all")
753 ################################################################################
def join_with_commas_and(list):
    """Render a list as English prose: "nothing" for an empty list, the
    lone element for a singleton, otherwise "a, b and c"."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    all_but_last = ", ".join(list[:-1])
    return "%s and %s" % (all_but_last, list[-1])
760 ################################################################################
def pp_deps (deps):
    """Pretty-print a list of (package, version, constraint) dependency
    atoms as a " |"-joined alternation, e.g. "foo (>= 1.0) |bar".
    NOTE(review): the def line, accumulator init and else-branch were lost
    from this listing; restored per upstream dak -- confirm."""
    pp_deps = []
    for atom in deps:
        (pkg, version, constraint) = atom
        if constraint:
            pp_dep = "%s (%s %s)" % (pkg, constraint, version)
        else:
            # No constraint: just the bare package name.
            pp_dep = pkg
        pp_deps.append(pp_dep)
    return " |".join(pp_deps)
773 ################################################################################
778 ################################################################################
# NOTE(review): garbled listing -- embedded line numbers and missing interior
# lines (the initial con_suites/check_source setup, several if/else and
# continue lines).  Left byte-identical.  Purpose: translate the -s/-c/-a
# command-line options into SQL "AND ... IN (...)" constraint fragments via
# database id lookups, plus a check_source flag.
780 # Handle -a, -c and -s arguments; returns them as SQL constraints
781 def parse_args(Options):
785 for suite in split_args(Options["Suite"]):
786 suite_id = database.get_suite_id(suite)
788 warn("suite '%s' not recognised." % (suite))
790 suite_ids_list.append(suite_id)
792 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
794 fubar("No valid suite given.")
799 if Options["Component"]:
800 component_ids_list = []
801 for component in split_args(Options["Component"]):
802 component_id = database.get_component_id(component)
803 if component_id == -1:
804 warn("component '%s' not recognised." % (component))
806 component_ids_list.append(component_id)
807 if component_ids_list:
808 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
810 fubar("No valid component given.")
814 # Process architecture
815 con_architectures = ""
816 if Options["Architecture"]:
819 for architecture in split_args(Options["Architecture"]):
820 if architecture == "source":
823 architecture_id = database.get_architecture_id(architecture)
824 if architecture_id == -1:
825 warn("architecture '%s' not recognised." % (architecture))
827 arch_ids_list.append(architecture_id)
829 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
832 fubar("No valid architecture given.")
836 return (con_suites, con_architectures, con_components, check_source)
838 ################################################################################
# NOTE(review): garbled listing -- the enclosing function's "def" line was
# lost (upstream dak calls it print_exc()), along with the frame-walking
# loop and try/except around the value printing.  Left byte-identical.
# Purpose: print the current traceback plus the local variables of each
# stack frame, for post-mortem debugging.
840 # Inspired(tm) by Bryn Keller's print_exc_plus (See
841 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
844 tb = sys.exc_info()[2]
853 traceback.print_exc()
855 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
856 frame.f_code.co_filename,
858 for key, value in frame.f_locals.items():
859 print "\t%20s = " % key,
863 print "<unable to print>"
865 ################################################################################
867 def try_with_debug(function):
875 ################################################################################
# Function for use in sorting lists of architectures.
# Sorts normally except that 'source' dominates all others.
def arch_compare_sw (a, b):
    """cmp()-style comparator: "source" sorts before every other
    architecture; otherwise plain string ordering."""
    # NOTE(review): the non-equal branches were lost from the listing;
    # restored per upstream dak.
    if a == "source" and b == "source":
        return 0
    elif a == "source":
        return -1
    elif b == "source":
        return 1
    # Plain cmp()-equivalent string comparison (spelled out so it also
    # works where cmp() is unavailable).
    return (a > b) - (a < b)
890 ################################################################################
892 # Split command line arguments which can be separated by either commas
893 # or whitespace. If dwim is set, it will complain about string ending
894 # in comma since this usually means someone did 'dak ls -a i386, m68k
895 # foo' or something and the inevitable confusion resulting from 'm68k'
896 # being treated as an argument is undesirable.
def split_args (s, dwim=1):
    """Split a command-line argument string on commas if any are present,
    otherwise on whitespace.  With dwim set, a trailing comma is treated
    as a fatal user error (probably a spurious space, e.g. "i386, m68k")."""
    # NOTE(review): the two return lines and else were lost from the
    # listing; restored per upstream dak.
    if s.find(",") == -1:
        return s.split()
    else:
        if s[-1:] == "," and dwim:
            fubar("split_args: found trailing comma, spurious space maybe?")
        return s.split(",")
906 ################################################################################
908 def Dict(**dict): return dict
910 ########################################
# NOTE(review): garbled listing -- embedded line numbers and many missing
# interior lines (the fork, the child's dup2/close sequence, the parent's
# read loop bookkeeping, output accumulation).  Left byte-identical; this
# fork/exec plumbing is exactly the kind of code that must be recovered
# verbatim from upstream before any edit.
# Purpose: run `cmd` under /bin/sh capturing stdout+stderr AND the gpgv
# --status-fd stream (via the status_read/status_write pipe), returning
# (output, status, exit_status).
912 # Our very own version of commands.getouputstatus(), hacked to support
914 def gpgv_get_status_output(cmd, status_read, status_write):
915 cmd = ['/bin/sh', '-c', cmd]
916 p2cread, p2cwrite = os.pipe()
917 c2pread, c2pwrite = os.pipe()
918 errout, errin = os.pipe()
928 for i in range(3, 256):
929 if i != status_write:
935 os.execvp(cmd[0], cmd)
941 os.dup2(c2pread, c2pwrite)
942 os.dup2(errout, errin)
946 i, o, e = select.select([c2pwrite, errin, status_read], [], [])
949 r = os.read(fd, 8196)
952 if fd == c2pwrite or fd == errin:
954 elif fd == status_read:
957 fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
959 pid, exit_status = os.waitpid(pid, 0)
961 os.close(status_write)
962 os.close(status_read)
972 return output, status, exit_status
974 ################################################################################
def process_gpgv_output(status):
    """Parse gpgv --status-fd output into ({keyword: args-list}, error-str).
    Malformed lines and (most) duplicate keywords are reported in the
    accumulated internal_error string rather than raised."""
    # NOTE(review): loop bookkeeping (continues, args slice) reconstructed
    # from a truncated listing -- confirm against upstream dak.
    keywords = {}
    internal_error = ""
    for line in status.split('\n'):
        line = line.strip()
        if line == "":
            continue
        split = line.split()
        if len(split) < 2:
            internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
            continue
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
            continue
        args = split[2:]
        # These keywords may legitimately appear more than once.
        if keyword in keywords and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
            continue
        else:
            keywords[keyword] = args
    return (keywords, internal_error)
1001 ################################################################################
# NOTE(review): garbled listing -- embedded line numbers and missing interior
# lines (the keyserver/keyring default guards, internal_error check, result
# check and final return).  Left byte-identical.  Purpose: when a signature's
# key is unknown, extract the NO_PUBKEY fingerprint via gpgv and fetch the
# key from the keyserver into the keyring; returns an error string on
# failure, per the visible docstring.
1003 def retrieve_key (filename, keyserver=None, keyring=None):
1004 """Retrieve the key that signed 'filename' from 'keyserver' and
1005 add it to 'keyring'. Returns nothing on success, or an error message
1008 # Defaults for keyserver and keyring
1010 keyserver = Cnf["Dinstall::KeyServer"]
1012 keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
1014 # Ensure the filename contains no shell meta-characters or other badness
1015 if not re_taint_free.match(filename):
1016 return "%s: tainted filename" % (filename)
1018 # Invoke gpgv on the file
1019 status_read, status_write = os.pipe();
1020 cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1021 (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1023 # Process the status-fd output
1024 (keywords, internal_error) = process_gpgv_output(status)
1026 return internal_error
1028 if not keywords.has_key("NO_PUBKEY"):
1029 return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1031 fingerprint = keywords["NO_PUBKEY"][0]
1032 # XXX - gpg sucks.  You can't use --secret-keyring=/dev/null as
1033 # it'll try to create a lockfile in /dev.  A better solution might
1034 # be a tempfile or something.
1035 cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1036 % (Cnf["Dinstall::SigningKeyring"])
1037 cmd += " --keyring %s --keyserver %s --recv-key %s" \
1038 % (keyring, keyserver, fingerprint)
1039 (result, output) = commands.getstatusoutput(cmd)
1041 return "'%s' failed with exit code %s" % (cmd, result)
1045 ################################################################################
def gpg_keyring_args(keyrings=None):
    """Return a "--keyring <path>" gpg argument string for each keyring in
    `keyrings`, defaulting to the configured Dinstall::GPGKeyring list."""
    # NOTE(review): the default guard was lost from the listing; restored
    # per upstream dak.
    if not keyrings:
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
    return " ".join(["--keyring %s" % x for x in keyrings])
1053 ################################################################################
# NOTE(review): garbled listing -- embedded line numbers and many missing
# interior lines (the early "return None" after each fatal reject, the
# bad/internal-error flags, the args-length checks, the final return of the
# fingerprint).  Left byte-identical; the exact reject/return interleaving
# is security-relevant and must be recovered from upstream dak, not guessed.
# Purpose (per the visible docstring): validate a GPG signature with gpgv,
# reporting every problem through reject() and returning the fingerprint on
# success.
1055 def check_signature (sig_filename, reject, data_filename="", keyrings=None, autofetch=None):
1056 """Check the signature of a file and return the fingerprint if the
1057 signature is valid or 'None' if it's not.  The first argument is the
1058 filename whose signature should be checked.  The second argument is a
1059 reject function and is called when an error is found.  The reject()
1060 function must allow for two arguments: the first is the error message,
1061 the second is an optional prefix string.  It's possible for reject()
1062 to be called more than once during an invocation of check_signature().
1063 The third argument is optional and is the name of the files the
1064 detached signature applies to.  The fourth argument is optional and is
1065 a *list* of keyrings to use.  'autofetch' can either be None, True or
1066 False.  If None, the default behaviour specified in the config will be
1069 # Ensure the filename contains no shell meta-characters or other badness
1070 if not re_taint_free.match(sig_filename):
1071 reject("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
1074 if data_filename and not re_taint_free.match(data_filename):
1075 reject("!!WARNING!! tainted data filename: '%s'." % (data_filename))
1079 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1081 # Autofetch the signing key if that's enabled
1082 if autofetch == None:
1083 autofetch = Cnf.get("Dinstall::KeyAutoFetch")
1085 error_msg = retrieve_key(sig_filename)
1090 # Build the command line
1091 status_read, status_write = os.pipe();
1092 cmd = "gpgv --status-fd %s %s %s %s" % (
1093 status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)
1095 # Invoke gpgv on the file
1096 (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)
1098 # Process the status-fd output
1099 (keywords, internal_error) = process_gpgv_output(status)
1101 # If we failed to parse the status-fd output, let's just whine and bail now
1103 reject("internal error while performing signature check on %s." % (sig_filename))
1104 reject(internal_error, "")
1105 reject("Please report the above errors to the Archive maintainers by replying to this mail.", "")
1109 # Now check for obviously bad things in the processed output
1110 if keywords.has_key("KEYREVOKED"):
1111 reject("The key used to sign %s has been revoked." % (sig_filename))
1113 if keywords.has_key("BADSIG"):
1114 reject("bad signature on %s." % (sig_filename))
1116 if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
1117 reject("failed to check signature on %s." % (sig_filename))
1119 if keywords.has_key("NO_PUBKEY"):
1120 args = keywords["NO_PUBKEY"]
1123 reject("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
1125 if keywords.has_key("BADARMOR"):
1126 reject("ASCII armour of signature was corrupt in %s." % (sig_filename))
1128 if keywords.has_key("NODATA"):
1129 reject("no signature found in %s." % (sig_filename))
1131 if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
1132 args = keywords["KEYEXPIRED"]
1135 reject("The key (0x%s) used to sign %s has expired." % (key, sig_filename))
1141 # Next check gpgv exited with a zero return code
1143 reject("gpgv failed while checking %s." % (sig_filename))
1145 reject(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
1147 reject(prefix_multi_line_string(output, " [GPG output:] "), "")
1150 # Sanity check the good stuff we expect
1151 if not keywords.has_key("VALIDSIG"):
1152 reject("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
1155 args = keywords["VALIDSIG"]
1157 reject("internal error while checking signature on %s." % (sig_filename))
1160 fingerprint = args[0]
1161 if not keywords.has_key("GOODSIG"):
1162 reject("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
1164 if not keywords.has_key("SIG_ID"):
1165 reject("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))
1168 # Finally ensure there's not something we don't recognise
1169 known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
1170 SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
1171 NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")
1173 for keyword in keywords.keys():
1174 if not known_keywords.has_key(keyword):
1175 reject("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
1183 ################################################################################
# NOTE(review): garbled listing -- embedded line numbers and missing interior
# lines (the cache-hit return, the set() init, the "if result == 0:" guard,
# the match test and the final return).  Left byte-identical.
1185 def gpg_get_key_addresses(fingerprint):
1186 """Retrieve email addresses from gpg key uids for a given fingerprint,
caching results in key_uid_email_cache."""
1187 addresses = key_uid_email_cache.get(fingerprint)
1188 if addresses != None:
1191 cmd = "gpg --no-default-keyring %s --fingerprint %s" \
1192 % (gpg_keyring_args(), fingerprint)
1193 (result, output) = commands.getstatusoutput(cmd)
1195 for l in output.split('\n'):
1196 m = re_gpg_uid.match(l)
1198 addresses.add(m.group(1))
1199 key_uid_email_cache[fingerprint] = addresses
1202 ################################################################################
# NOTE(review): garbled listing -- embedded line numbers and most of the
# word-wrapping bookkeeping (accumulator/line init, the per-word loop header,
# line resets, the final flush and return) are missing.  Left byte-identical.
# Purpose: wrap `paragraph` to `max_length` columns, prefixing continuation
# lines with `prefix`; textwrap could likely replace this once recovered.
1204 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
1206 def wrap(paragraph, max_length, prefix=""):
1210 words = paragraph.split()
1213 word_size = len(word)
1214 if word_size > max_length:
1216 s += line + '\n' + prefix
1217 s += word + '\n' + prefix
1220 new_length = len(line) + word_size + 1
1221 if new_length > max_length:
1222 s += line + '\n' + prefix
1235 ################################################################################
1237 # Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
1238 # Returns fixed 'src'
def clean_symlink (src, dest, root):
    """Relativize the absolute symlink `src` -> `dest` with respect to
    `root`; returns the fixed (relative) source path."""
    rel_src = src.replace(root, '', 1)
    rel_dest_dir = os.path.dirname(dest.replace(root, '', 1))
    # One "../" hop per path component of the destination's directory.
    ups = '../' * len(rel_dest_dir.split('/'))
    return ups + rel_src
1246 ################################################################################
# NOTE(review): garbled listing -- embedded line numbers and missing interior
# lines (the directory guard, the dotprefix guard, os.close and the return).
# Left byte-identical.  Note it temporarily mutates the module-global
# tempfile.tempdir, which is not thread-safe.
1248 def temp_filename(directory=None, dotprefix=None, perms=0700):
1249 """Return a secure and unique filename by pre-creating it.
1250 If 'directory' is non-null, it will be the directory the file is pre-created in.
1251 If 'dotprefix' is non-null, the filename will be prefixed with a '.'."""
1254 old_tempdir = tempfile.tempdir
1255 tempfile.tempdir = directory
1257 filename = tempfile.mktemp()
1260 filename = "%s/.%s" % (os.path.dirname(filename), os.path.basename(filename))
1261 fd = os.open(filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, perms)
1265 tempfile.tempdir = old_tempdir
1269 ################################################################################
# NOTE(review): garbled listing -- embedded line numbers and missing interior
# lines (the "global alias_cache" declaration, the set() init and the
# "if aliasfn:" guard).  Left byte-identical.  Purpose: lazily load the
# forward-alias file into a module-level cache and test whether the local
# part of `email` is listed in it.
1271 # checks if the user part of the email is listed in the alias file
1273 def is_email_alias(email):
1275 if alias_cache == None:
1276 aliasfn = which_alias_file()
1279 for l in open(aliasfn):
1280 alias_cache.add(l.split(':')[0])
1281 uid = email.split('@')[0]
1282 return uid in alias_cache
1284 ################################################################################
# NOTE(review): module initialisation -- builds the global apt-style Cnf
# configuration from default_config, then layers the per-host config on top.
# Garbled listing: the surrounding lines (likely apt_pkg.init() and error
# handling) are missing; left byte-identical.
1288 Cnf = apt_pkg.newConfiguration()
1289 apt_pkg.ReadConfigFileISC(Cnf,default_config)
1291 if which_conf_file() != default_config:
1292 apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1294 ################################################################################