4 # Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
6 ################################################################################
8 # This program is free software; you can redistribute it and/or modify
9 # it under the terms of the GNU General Public License as published by
10 # the Free Software Foundation; either version 2 of the License, or
11 # (at your option) any later version.
13 # This program is distributed in the hope that it will be useful,
14 # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 # GNU General Public License for more details.
18 # You should have received a copy of the GNU General Public License
19 # along with this program; if not, write to the Free Software
20 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
22 ################################################################################
24 import codecs, commands, email.Header, os, pwd, re, select, socket, shutil, \
25 sys, tempfile, traceback
28 from dak_exceptions import *
30 ################################################################################
# Precompiled regular expressions used throughout this module for
# parsing control/changes files, package filenames and version strings.
32 re_comments = re.compile(r"\#.*")
33 re_no_epoch = re.compile(r"^\d+\:")
34 re_no_revision = re.compile(r"-[^-]+$")
35 re_arch_from_filename = re.compile(r"/binary-[^/]+/")
36 re_extract_src_version = re.compile (r"(\S+)\s*\((.*)\)")
37 re_isadeb = re.compile (r"(.+?)_(.+?)_(.+)\.u?deb$")
38 re_issource = re.compile (r"(.+)_(.+?)\.(orig\.tar\.gz|diff\.gz|tar\.gz|dsc)$")
40 re_single_line_field = re.compile(r"^(\S*)\s*:\s*(.*)")
41 re_multi_line_field = re.compile(r"^\s(.*)")
42 re_taint_free = re.compile(r"^[-+~/\.\w]+$")
44 re_parse_maintainer = re.compile(r"^\s*(\S.*\S)\s*\<([^\>]+)\>")
45 re_gpg_uid = re.compile('^uid.*<([^>]*)>')
47 re_srchasver = re.compile(r"^(\S+)\s+\((\S+)\)$")
48 re_verwithext = re.compile(r"^(\d+)(?:\.(\d+))(?:\s+\((\S+)\))?$")
# NOTE(review): the re_srchasver definition below is an exact duplicate
# of the one two lines up; one of the two could be dropped.
50 re_srchasver = re.compile(r"^(\S+)\s+\((\S+)\)$")
# Default config paths; overridable per-host via which_conf_file() /
# which_apt_conf_file() below.
52 default_config = "/etc/dak/dak.conf"
53 default_apt_config = "/etc/dak/apt.conf"
# Cache: key fingerprint -> addresses, filled by gpg_get_key_addresses().
56 key_uid_email_cache = {}
58 # (hashname, function, earliest_changes_version)
59 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
60 ("sha256", apt_pkg.sha256sum, (1, 8))]
62 ################################################################################
# Open 'filename' with 'mode', wrapping failure in CantOpenError so
# callers get a dak-specific exception instead of IOError.
64 def open_file(filename, mode='r'):
66 f = open(filename, mode)
# NOTE(review): the try/except around open() is elided in this excerpt;
# the raise below fires when open() fails.
68 raise CantOpenError, filename
71 ################################################################################
# Prompt on stdout and read one line of user input; the message below is
# emitted on stderr when the user sends EOF (^D).
73 def our_raw_input(prompt=""):
75 sys.stdout.write(prompt)
81 sys.stderr.write("\nUser interrupt (^D).\n")
84 ################################################################################
# Split a Debian "component/section" Section value; returns the tuple
# (section, component).
86 def extract_component_from_section(section):
89 if section.find('/') != -1:
90 component = section.split('/')[0]
92 # Expand default component
94 if Cnf.has_key("Component::%s" % section):
# NOTE(review): the assignments that pick the default component are
# elided in this excerpt.
99 return (section, component)
101 ################################################################################
# Parse an RFC822-ish Debian .changes/.dsc file into a dict; raises
# ParseChangesError / InvalidDscError on malformed input.
103 def parse_changes(filename, signing_rules=0):
104 """Parses a changes file and returns a dictionary where each field is a
105 key. The mandatory first argument is the filename of the .changes
108 signing_rules is an optional argument:
110 o If signing_rules == -1, no signature is required.
111 o If signing_rules == 0 (the default), a signature is required.
112 o If signing_rules == 1, it turns on the same strict format checking
115 The rules for (signing_rules == 1)-mode are:
117 o The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
118 followed by any PGP header data and must end with a blank line.
120 o The data section must end with a blank line and must be followed by
121 "-----BEGIN PGP SIGNATURE-----".
127 changes_in = open_file(filename)
128 lines = changes_in.readlines()
131 raise ParseChangesError, "[Empty changes file]"
133 # Reindex by line number so we can easily verify the format of
139 indexed_lines[index] = line[:-1]
143 num_of_lines = len(indexed_lines.keys())
146 while index < num_of_lines:
148 line = indexed_lines[index]
# Strict (dsc) mode: the signature block must follow immediately.
150 if signing_rules == 1:
152 if index > num_of_lines:
153 raise InvalidDscError, index
154 line = indexed_lines[index]
155 if not line.startswith("-----BEGIN PGP SIGNATURE"):
156 raise InvalidDscError, index
161 if line.startswith("-----BEGIN PGP SIGNATURE"):
163 if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
165 if signing_rules == 1:
# Skip the PGP armor header lines up to the first blank line.
166 while index < num_of_lines and line != "":
168 line = indexed_lines[index]
170 # If we're not inside the signed data, don't process anything
171 if signing_rules >= 0 and not inside_signature:
173 slf = re_single_line_field.match(line)
175 field = slf.groups()[0].lower()
176 changes[field] = slf.groups()[1]
180 changes[field] += '\n'
182 mlf = re_multi_line_field.match(line)
185 raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
186 if first == 1 and changes[field] != "":
187 changes[field] += '\n'
189 changes[field] += mlf.groups()[0] + '\n'
193 if signing_rules == 1 and inside_signature:
194 raise InvalidDscError, index
# Keep the verbatim file content for later re-use (e.g. mails).
197 changes["filecontents"] = "".join(lines)
199 if changes.has_key("source"):
200 # Strip the source version in brackets from the source field,
201 # put it in the "source-version" field instead.
202 srcver = re_srchasver.search(changes["source"])
204 changes["source"] = srcver.group(1)
205 changes["source-version"] = srcver.group(2)
207 raise ParseChangesError, error
212 ################################################################################
# Compute the 'key' checksum (via testfn) of every file in lfiles and
# record it in basedict; returns a list of rejection messages.
214 def create_hash (lfiles, key, testfn, basedict = None):
216 for f in lfiles.keys():
218 file_handle = open_file(f)
219 except CantOpenError:
220 rejmsg.append("Could not open file %s for checksumming" % (f))
# testfn is handed the open file object (cf. apt_pkg.sha1sum /
# apt_pkg.sha256sum in known_hashes above).
223 basedict[f]['%ssum' % key] = testfn(file_handle)
228 ################################################################################
# Verify the 'key' checksum and file size of each entry in lfiles against
# the on-disk files; 'where' names the source of the expected values for
# error messages. Returns a list of rejection messages.
230 def check_hash (where, lfiles, key, testfn, basedict = None):
233 for f in basedict.keys():
235 rejmsg.append("%s: no %s checksum" % (f, key))
237 for f in lfiles.keys():
238 if basedict and f not in basedict:
239 rejmsg.append("%s: extraneous entry in %s checksums" % (f, key))
242 file_handle = open_file(f)
243 except CantOpenError:
247 if testfn(file_handle) != lfiles[f][key]:
248 rejmsg.append("%s: %s check failed." % (f, key))
250 # Store the hashes for later use
251 basedict[f]['%ssum' % key] = lfiles[f][key]
# NOTE(review): 'stat' is not among the imports visible in this excerpt;
# confirm 'import stat' exists elsewhere in the file.
253 actual_size = os.stat(f)[stat.ST_SIZE]
254 size = int(lfiles[f]["size"])
255 if size != actual_size:
256 rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
257 % (f, actual_size, size, where))
261 ################################################################################
# Validate/compute all known checksum fields for an Upload's .changes and
# .dsc; returns the accumulated rejection messages.
263 def ensure_hashes(Upload):
265 for x in Upload.changes:
266 if x.startswith("checksum-"):
267 h = x.split("-",1)[1]
268 if h not in dict(known_hashes):
# NOTE(review): this format string has no %s placeholder, so the
# "% (h)" application will raise TypeError if this branch is hit;
# same defect in the .dsc message below.
269 rejmsg.append("Unsupported checksum field in .changes" % (h))
272 if x.startswith("checksum-"):
273 h = x.split("-",1)[1]
274 if h not in dict(known_hashes):
275 rejmsg.append("Unsupported checksum field in .dsc" % (h))
277 # We have to calculate the hash if we have an earlier changes version than
278 # the hash appears in rather than require it exist in the changes file
279 # I hate backwards compatibility
280 for h,f,v in known_hashes:
282 fs = build_file_list(Upload.changes, 0, "checksums-%s" % h, h)
284 for m in create_hash(fs, h, f, Upload.files):
287 for m in check_hash(".changes %s" % (h), fs, h, f, Upload.files):
289 except NoFilesFieldError:
290 rejmsg.append("No Checksums-%s: field in .changes" % (h))
291 except UnknownFormatError, format:
292 rejmsg.append("%s: unknown format of .changes" % (format))
293 except ParseChangesError, line:
294 rejmsg.append("parse error for Checksums-%s in .changes, can't grok: %s." % (h, line))
# Source checks only apply when the upload includes source.
296 if "source" not in Upload.changes["architecture"]: continue
299 fs = build_file_list(Upload.dsc, 1, "checksums-%s" % h, h)
301 for m in create_hash(fs, h, f, Upload.dsc_files):
304 for m in check_hash(".dsc %s" % (h), fs, h, f, Upload.dsc_files):
306 except UnknownFormatError, format:
307 rejmsg.append("%s: unknown format of .dsc" % (format))
308 except NoFilesFieldError:
309 rejmsg.append("No Checksums-%s: field in .dsc" % (h))
310 except ParseChangesError, line:
311 rejmsg.append("parse error for Checksums-%s in .dsc, can't grok: %s." % (h, line))
315 ################################################################################
317 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
# Parse a Files:/Checksums-*: style field from parsed changes/dsc data
# into a dict keyed by filename; validates the Format: version first.
319 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
322 # Make sure we have a Files: field to parse...
323 if not changes.has_key(field):
324 raise NoFilesFieldError
326 # Make sure we recognise the format of the Files: field
327 format = re_verwithext.search(changes.get("format", "0.0"))
329 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
331 format = format.groups()
332 if format[1] == None:
333 format = int(float(format[0])), 0, format[2]
335 format = int(format[0]), int(format[1]), format[2]
336 if format[2] == None:
341 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
# Only Format versions 1.5 through 1.8 are accepted; checksum fields
# require at least 1.8.
343 if (format < (1,5) or format > (1,8)):
344 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
345 if field != "files" and format < (1,8):
346 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
# .changes "Files:" lines carry section/priority; .dsc lines do not.
348 includes_section = (not is_a_dsc) and field == "files"
350 # Parse each entry/line:
351 for i in changes[field].split('\n'):
355 section = priority = ""
358 (md5, size, section, priority, name) = s
360 (md5, size, name) = s
362 raise ParseChangesError, i
369 (section, component) = extract_component_from_section(section)
371 files[name] = Dict(size=size, section=section,
372 priority=priority, component=component)
373 files[name][hashname] = md5
377 ################################################################################
379 def force_to_utf8(s):
380 """Forces a string to UTF-8. If the string isn't already UTF-8,
381 it's assumed to be ISO-8859-1."""
# NOTE(review): the initial try-decode-as-UTF-8 branch is elided in this
# excerpt; the lines below are the latin-1 fallback.
386 latin1_s = unicode(s,'iso8859-1')
387 return latin1_s.encode('utf-8')
389 def rfc2047_encode(s):
390 """Encodes a (header) string per RFC2047 if necessary. If the
391 string is neither ASCII nor UTF-8, it's assumed to be ISO-8859-1."""
# codecs.lookup(...)[1] is the codec's encoder callable; encoder errors
# drive the fallback chain ascii -> utf-8 -> iso-8859-1.
393 codecs.lookup('ascii')[1](s)
398 codecs.lookup('utf-8')[1](s)
399 h = email.Header.Header(s, 'utf-8', 998)
402 h = email.Header.Header(s, 'iso-8859-1', 998)
405 ################################################################################
407 # <Culus> 'The standard sucks, but my tool is supposed to interoperate
408 # with it. I know - I'll fix the suckage and make things
411 def fix_maintainer (maintainer):
412 """Parses a Maintainer or Changed-By field and returns:
413 (1) an RFC822 compatible version,
414 (2) an RFC2047 compatible version,
418 The name is forced to UTF-8 for both (1) and (3). If the name field
419 contains '.' or ',' (as allowed by Debian policy), (1) and (2) are
420 switched to 'email (name)' format."""
421 maintainer = maintainer.strip()
423 return ('', '', '', '')
# Three input shapes: bare address, "<addr>", and "Name <addr>".
425 if maintainer.find("<") == -1:
428 elif (maintainer[0] == "<" and maintainer[-1:] == ">"):
429 email = maintainer[1:-1]
432 m = re_parse_maintainer.match(maintainer)
434 raise ParseMaintError, "Doesn't parse as a valid Maintainer field."
438 # Get an RFC2047 compliant version of the name
439 rfc2047_name = rfc2047_encode(name)
441 # Force the name to be UTF-8
442 name = force_to_utf8(name)
444 if name.find(',') != -1 or name.find('.') != -1:
445 rfc822_maint = "%s (%s)" % (email, name)
446 rfc2047_maint = "%s (%s)" % (email, rfc2047_name)
448 rfc822_maint = "%s <%s>" % (name, email)
449 rfc2047_maint = "%s <%s>" % (rfc2047_name, email)
# buildd_* pseudo-addresses are exempt from the '@' sanity check.
451 if email.find("@") == -1 and email.find("buildd_") != 0:
452 raise ParseMaintError, "No @ found in email address part."
454 return (rfc822_maint, rfc2047_maint, name, email)
456 ################################################################################
458 # sendmail wrapper, takes _either_ a message string or a file as arguments
459 def send_mail (message, filename=""):
460 # If we've been passed a string dump it into a temporary file
# NOTE(review): tempfile.mktemp() is racy, though the O_EXCL open below
# makes the window fail loudly rather than silently.
462 filename = tempfile.mktemp()
463 fd = os.open(filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0700)
464 os.write (fd, message)
# Pipe the file to the configured sendmail command.
468 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
470 raise SendmailFailedError, output
472 # Clean up any temporary files
476 ################################################################################
# Return the pool sub-path for a source package: "lib" packages are
# bucketed by their first four characters, others by the first one.
478 def poolify (source, component):
481 if source[:3] == "lib":
482 return component + source[:4] + '/' + source + '/'
484 return component + source[:1] + '/' + source + '/'
486 ################################################################################
# Move 'src' to 'dest' (copy2 + chmod; the unlink of src is elided in
# this excerpt), creating the destination directory if needed.
488 def move (src, dest, overwrite = 0, perms = 0664):
489 if os.path.exists(dest) and os.path.isdir(dest):
492 dest_dir = os.path.dirname(dest)
493 if not os.path.exists(dest_dir):
# Temporarily clear umask so makedirs gets the exact 02775 mode.
494 umask = os.umask(00000)
495 os.makedirs(dest_dir, 02775)
497 #print "Moving %s to %s..." % (src, dest)
498 if os.path.exists(dest) and os.path.isdir(dest):
499 dest += '/' + os.path.basename(src)
500 # Don't overwrite unless forced to
501 if os.path.exists(dest):
503 fubar("Can't move %s to %s - file already exists." % (src, dest))
505 if not os.access(dest, os.W_OK):
506 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
507 shutil.copy2(src, dest)
508 os.chmod(dest, perms)
# Copy 'src' to 'dest'; same structure as move() above but raises
# exceptions (FileExistsError/CantOverwriteError) instead of fubar()-ing.
511 def copy (src, dest, overwrite = 0, perms = 0664):
512 if os.path.exists(dest) and os.path.isdir(dest):
515 dest_dir = os.path.dirname(dest)
516 if not os.path.exists(dest_dir):
517 umask = os.umask(00000)
518 os.makedirs(dest_dir, 02775)
520 #print "Copying %s to %s..." % (src, dest)
521 if os.path.exists(dest) and os.path.isdir(dest):
522 dest += '/' + os.path.basename(src)
523 # Don't overwrite unless forced to
524 if os.path.exists(dest):
526 raise FileExistsError
528 if not os.access(dest, os.W_OK):
529 raise CantOverwriteError
530 shutil.copy2(src, dest)
531 os.chmod(dest, perms)
533 ################################################################################
536 res = socket.gethostbyaddr(socket.gethostname())
537 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
538 if database_hostname:
539 return database_hostname
# Return the host-specific dak config path if one is configured for this
# machine's hostname, otherwise the compiled-in default.
543 def which_conf_file ():
544 res = socket.gethostbyaddr(socket.gethostname())
545 if Cnf.get("Config::" + res[0] + "::DakConfig"):
546 return Cnf["Config::" + res[0] + "::DakConfig"]
548 return default_config
# As which_conf_file(), but for the apt configuration file.
550 def which_apt_conf_file ():
551 res = socket.gethostbyaddr(socket.gethostname())
552 if Cnf.get("Config::" + res[0] + "::AptConfig"):
553 return Cnf["Config::" + res[0] + "::AptConfig"]
555 return default_apt_config
# Return this host's forward-alias file path if it exists (the
# return statements are elided in this excerpt; presumably aliasfn or None).
557 def which_alias_file():
558 hostname = socket.gethostbyaddr(socket.gethostname())[0]
559 aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
560 if os.path.exists(aliasfn):
565 ################################################################################
567 # Escape characters which have meaning to SQL's regex comparison operator ('~')
568 # (woefully incomplete)
571 s = s.replace('+', '\\\\+')
572 s = s.replace('.', '\\\\.')
575 ################################################################################
577 # Perform a substition of template
# Read the template file and replace each key of 'map' occurring in it
# with the corresponding value (plain string substitution, no escaping).
578 def TemplateSubst(map, filename):
579 file = open_file(filename)
580 template = file.read()
582 template = template.replace(x,map[x])
586 ################################################################################
# Print a fatal error to stderr (and exit with exit_code; the sys.exit
# call is elided in this excerpt).
588 def fubar(msg, exit_code=1):
589 sys.stderr.write("E: %s\n" % (msg))
# NOTE(review): the line below is the body of warn(msg); its def line is
# elided in this excerpt.
593 sys.stderr.write("W: %s\n" % (msg))
595 ################################################################################
597 # Returns the user name with a laughable attempt at rfc822 conformancy
598 # (read: removing stray periods).
600 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
602 ################################################################################
612 return ("%d%s" % (c, t))
614 ################################################################################
# Normalise the parsed "architecture" field from a whitespace-separated
# string into a dict keyed by architecture name (values are 1).
616 def cc_fix_changes (changes):
617 o = changes.get("architecture", "")
619 del changes["architecture"]
620 changes["architecture"] = {}
622 changes["architecture"][j] = 1
624 # Sort by source name, source version, 'have source', and then by filename
# cmp()-style comparator for .changes filenames: orders by source name,
# then version, then presence of source, then filename.
625 def changes_compare (a, b):
627 a_changes = parse_changes(a)
632 b_changes = parse_changes(b)
636 cc_fix_changes (a_changes)
637 cc_fix_changes (b_changes)
639 # Sort by source name
640 a_source = a_changes.get("source")
641 b_source = b_changes.get("source")
642 q = cmp (a_source, b_source)
646 # Sort by source version
647 a_version = a_changes.get("version", "0")
648 b_version = b_changes.get("version", "0")
649 q = apt_pkg.VersionCompare(a_version, b_version)
653 # Sort by 'have source'
654 a_has_source = a_changes["architecture"].get("source")
655 b_has_source = b_changes["architecture"].get("source")
656 if a_has_source and not b_has_source:
658 elif b_has_source and not a_has_source:
661 # Fall back to sort by filename
664 ################################################################################
# Find a non-existing filename by appending '.<n>' suffixes to dest;
# gives up with NoFreeFilenameError after too_many attempts.
666 def find_next_free (dest, too_many=100):
669 while os.path.exists(dest) and extra < too_many:
670 dest = orig_dest + '.' + repr(extra)
672 if extra >= too_many:
673 raise NoFreeFilenameError
676 ################################################################################
# Join a row of values with 'sep', substituting a placeholder for None
# entries (the None branch body is elided in this excerpt).
678 def result_join (original, sep = '\t'):
680 for i in xrange(len(original)):
681 if original[i] == None:
# NOTE(review): the accumulator is named 'list', shadowing the builtin.
684 list.append(original[i])
685 return sep.join(list)
687 ################################################################################
# Prefix every line of 'str' with 'prefix'; blank lines are dropped
# unless include_blank_lines is set.
689 def prefix_multi_line_string(str, prefix, include_blank_lines=0):
691 for line in str.split('\n'):
693 if line or include_blank_lines:
694 out += "%s%s\n" % (prefix, line)
695 # Strip trailing new line
700 ################################################################################
702 def validate_changes_file_arg(filename, require_changes=1):
703 """'filename' is either a .changes or .dak file. If 'filename' is a
704 .dak file, it's changed to be the corresponding .changes file. The
705 function then checks if the .changes file a) exists and b) is
706 readable and returns the .changes filename if so. If there's a
707 problem, the next action depends on the option 'require_changes'
710 o If 'require_changes' == -1, errors are ignored and the .changes
711 filename is returned.
712 o If 'require_changes' == 0, a warning is given and 'None' is returned.
713 o If 'require_changes' == 1, a fatal error is raised.
717 orig_filename = filename
718 if filename.endswith(".dak"):
719 filename = filename[:-4]+".changes"
721 if not filename.endswith(".changes"):
722 error = "invalid file type; not a changes file"
723 if not os.access(filename,os.R_OK):
725 if os.path.exists(filename):
726 error = "permission denied"
728 error = "file not found"
# Error disposition depends on require_changes (see docstring).
731 if require_changes == 1:
732 fubar("%s: %s." % (orig_filename, error))
733 elif require_changes == 0:
734 warn("Skipping %s - %s" % (orig_filename, error))
736 else: # We only care about the .dak file
741 ################################################################################
744 return (arch != "source" and arch != "all")
746 ################################################################################
def join_with_commas_and(list):
    """Render a sequence of strings as an English enumeration.

    Returns "nothing" for an empty sequence, the sole element for a
    one-element sequence, and "a, b and c" style text otherwise.
    """
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    return "%s and %s" % (", ".join(list[:-1]), list[-1])
753 ################################################################################
758 (pkg, version, constraint) = atom
760 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
763 pp_deps.append(pp_dep)
764 return " |".join(pp_deps)
766 ################################################################################
771 ################################################################################
773 # Handle -a, -c and -s arguments; returns them as SQL constraints
# Translate -s/-c/-a command-line options into SQL WHERE fragments
# (con_suites, con_architectures, con_components, check_source).
774 def parse_args(Options):
778 for suite in split_args(Options["Suite"]):
779 suite_id = database.get_suite_id(suite)
781 warn("suite '%s' not recognised." % (suite))
783 suite_ids_list.append(suite_id)
785 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
787 fubar("No valid suite given.")
792 if Options["Component"]:
793 component_ids_list = []
794 for component in split_args(Options["Component"]):
795 component_id = database.get_component_id(component)
796 if component_id == -1:
797 warn("component '%s' not recognised." % (component))
799 component_ids_list.append(component_id)
800 if component_ids_list:
801 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
803 fubar("No valid component given.")
807 # Process architecture
808 con_architectures = ""
809 if Options["Architecture"]:
812 for architecture in split_args(Options["Architecture"]):
# "source" is not a real architecture row; it flips check_source instead.
813 if architecture == "source":
816 architecture_id = database.get_architecture_id(architecture)
817 if architecture_id == -1:
818 warn("architecture '%s' not recognised." % (architecture))
820 arch_ids_list.append(architecture_id)
822 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
825 fubar("No valid architecture given.")
829 return (con_suites, con_architectures, con_components, check_source)
831 ################################################################################
833 # Inspired(tm) by Bryn Keller's print_exc_plus (See
834 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
837 tb = sys.exc_info()[2]
846 traceback.print_exc()
848 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
849 frame.f_code.co_filename,
851 for key, value in frame.f_locals.items():
852 print "\t%20s = " % key,
856 print "<unable to print>"
858 ################################################################################
860 def try_with_debug(function):
868 ################################################################################
870 # Function for use in sorting lists of architectures.
871 # Sorts normally except that 'source' dominates all others.
# Comparator for architecture names: normal ordering except that
# 'source' sorts before everything else (remaining branches elided).
873 def arch_compare_sw (a, b):
874 if a == "source" and b == "source":
883 ################################################################################
885 # Split command line arguments which can be separated by either commas
886 # or whitespace. If dwim is set, it will complain about string ending
887 # in comma since this usually means someone did 'dak ls -a i386, m68k
888 # foo' or something and the inevitable confusion resulting from 'm68k'
889 # being treated as an argument is undesirable.
# Split an argument string on commas (or whitespace when no comma is
# present); with dwim set, a trailing comma is treated as a user error.
891 def split_args (s, dwim=1):
892 if s.find(",") == -1:
895 if s[-1:] == "," and dwim:
896 fubar("split_args: found trailing comma, spurious space maybe?")
899 ################################################################################
901 def Dict(**dict): return dict
903 ########################################
905 # Our very own version of commands.getouputstatus(), hacked to support
# commands.getstatusoutput() variant that additionally captures gpgv's
# --status-fd channel; returns (stdout+stderr output, status-fd output,
# raw exit status from waitpid).
907 def gpgv_get_status_output(cmd, status_read, status_write):
908 cmd = ['/bin/sh', '-c', cmd]
909 p2cread, p2cwrite = os.pipe()
910 c2pread, c2pwrite = os.pipe()
911 errout, errin = os.pipe()
# Child: close every fd except the status pipe, then exec the command.
921 for i in range(3, 256):
922 if i != status_write:
928 os.execvp(cmd[0], cmd)
# Parent: multiplex reads from the child's output and status pipes.
934 os.dup2(c2pread, c2pwrite)
935 os.dup2(errout, errin)
939 i, o, e = select.select([c2pwrite, errin, status_read], [], [])
942 r = os.read(fd, 8196)
945 if fd == c2pwrite or fd == errin:
947 elif fd == status_read:
950 fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
952 pid, exit_status = os.waitpid(pid, 0)
954 os.close(status_write)
955 os.close(status_read)
965 return output, status, exit_status
967 ################################################################################
# Parse gpgv --status-fd output into {keyword: args} plus a string of
# accumulated internal-error descriptions (empty when parsing went fine).
969 def process_gpgv_output(status):
970 # Process the status-fd output
973 for line in status.split('\n'):
979 internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
981 (gnupg, keyword) = split[:2]
982 if gnupg != "[GNUPG:]":
983 internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
# Some status tokens may legitimately appear more than once.
986 if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
987 internal_error += "found duplicate status token ('%s').\n" % (keyword)
990 keywords[keyword] = args
992 return (keywords, internal_error)
994 ################################################################################
996 def retrieve_key (filename, keyserver=None, keyring=None):
997 """Retrieve the key that signed 'filename' from 'keyserver' and
998 add it to 'keyring'. Returns nothing on success, or an error message
1001 # Defaults for keyserver and keyring
1003 keyserver = Cnf["Dinstall::KeyServer"]
1005 keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
1007 # Ensure the filename contains no shell meta-characters or other badness
1008 if not re_taint_free.match(filename):
1009 return "%s: tainted filename" % (filename)
1011 # Invoke gpgv on the file
# An empty /dev/null keyring guarantees a NO_PUBKEY status we can mine
# for the signing key's fingerprint.
1012 status_read, status_write = os.pipe();
1013 cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1014 (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1016 # Process the status-fd output
1017 (keywords, internal_error) = process_gpgv_output(status)
1019 return internal_error
1021 if not keywords.has_key("NO_PUBKEY"):
1022 return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1024 fingerprint = keywords["NO_PUBKEY"][0]
1025 # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
1026 # it'll try to create a lockfile in /dev. A better solution might
1027 # be a tempfile or something.
1028 cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1029 % (Cnf["Dinstall::SigningKeyring"])
1030 cmd += " --keyring %s --keyserver %s --recv-key %s" \
1031 % (keyring, keyserver, fingerprint)
1032 (result, output) = commands.getstatusoutput(cmd)
1034 return "'%s' failed with exit code %s" % (cmd, result)
1038 ################################################################################
# Build the "--keyring X --keyring Y ..." argument string for gpg/gpgv;
# falls back to the configured Dinstall::GPGKeyring list.
1040 def gpg_keyring_args(keyrings=None):
1042 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1044 return " ".join(["--keyring %s" % x for x in keyrings])
1046 ################################################################################
1048 def check_signature (sig_filename, reject, data_filename="", keyrings=None, autofetch=None):
1049 """Check the signature of a file and return the fingerprint if the
1050 signature is valid or 'None' if it's not. The first argument is the
1051 filename whose signature should be checked. The second argument is a
1052 reject function and is called when an error is found. The reject()
1053 function must allow for two arguments: the first is the error message,
1054 the second is an optional prefix string. It's possible for reject()
1055 to be called more than once during an invocation of check_signature().
1056 The third argument is optional and is the name of the files the
1057 detached signature applies to. The fourth argument is optional and is
1058 a *list* of keyrings to use. 'autofetch' can either be None, True or
1059 False. If None, the default behaviour specified in the config will be
1062 # Ensure the filename contains no shell meta-characters or other badness
1063 if not re_taint_free.match(sig_filename):
1064 reject("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
1067 if data_filename and not re_taint_free.match(data_filename):
1068 reject("!!WARNING!! tainted data filename: '%s'." % (data_filename))
1072 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1074 # Autofetch the signing key if that's enabled
1075 if autofetch == None:
1076 autofetch = Cnf.get("Dinstall::KeyAutoFetch")
1078 error_msg = retrieve_key(sig_filename)
1083 # Build the command line
1084 status_read, status_write = os.pipe();
1085 cmd = "gpgv --status-fd %s %s %s %s" % (
1086 status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)
1088 # Invoke gpgv on the file
1089 (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)
1091 # Process the status-fd output
1092 (keywords, internal_error) = process_gpgv_output(status)
1094 # If we failed to parse the status-fd output, let's just whine and bail now
1096 reject("internal error while performing signature check on %s." % (sig_filename))
1097 reject(internal_error, "")
1098 reject("Please report the above errors to the Archive maintainers by replying to this mail.", "")
1102 # Now check for obviously bad things in the processed output
1103 if keywords.has_key("KEYREVOKED"):
1104 reject("The key used to sign %s has been revoked." % (sig_filename))
1106 if keywords.has_key("BADSIG"):
1107 reject("bad signature on %s." % (sig_filename))
1109 if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
1110 reject("failed to check signature on %s." % (sig_filename))
1112 if keywords.has_key("NO_PUBKEY"):
1113 args = keywords["NO_PUBKEY"]
1116 reject("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
1118 if keywords.has_key("BADARMOR"):
1119 reject("ASCII armour of signature was corrupt in %s." % (sig_filename))
1121 if keywords.has_key("NODATA"):
1122 reject("no signature found in %s." % (sig_filename))
# An expired key is only fatal when there was no good signature.
1124 if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
1125 args = keywords["KEYEXPIRED"]
1128 reject("The key (0x%s) used to sign %s has expired." % (key, sig_filename))
1134 # Next check gpgv exited with a zero return code
1136 reject("gpgv failed while checking %s." % (sig_filename))
1138 reject(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
1140 reject(prefix_multi_line_string(output, " [GPG output:] "), "")
1143 # Sanity check the good stuff we expect
1144 if not keywords.has_key("VALIDSIG"):
1145 reject("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
1148 args = keywords["VALIDSIG"]
1150 reject("internal error while checking signature on %s." % (sig_filename))
# VALIDSIG's first argument is the signing key's fingerprint.
1153 fingerprint = args[0]
1154 if not keywords.has_key("GOODSIG"):
1155 reject("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
1157 if not keywords.has_key("SIG_ID"):
1158 reject("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))
1161 # Finally ensure there's not something we don't recognise
1162 known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
1163 SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
1164 NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")
1166 for keyword in keywords.keys():
1167 if not known_keywords.has_key(keyword):
1168 reject("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
1176 ################################################################################
1178 def gpg_get_key_addresses(fingerprint):
1179 """Retrieve email addresses from gpg key uids for a given fingerprint."""
# Results are memoised per fingerprint in the module-level cache.
1180 addresses = key_uid_email_cache.get(fingerprint)
1181 if addresses != None:
1184 cmd = "gpg --no-default-keyring %s --fingerprint %s" \
1185 % (gpg_keyring_args(), fingerprint)
1186 (result, output) = commands.getstatusoutput(cmd)
1188 for l in output.split('\n'):
1189 m = re_gpg_uid.match(l)
1191 addresses.add(m.group(1))
1192 key_uid_email_cache[fingerprint] = addresses
1195 ################################################################################
1197 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
# Greedy word-wrap of 'paragraph' to 'max_length' columns, optionally
# prefixing continuation lines with 'prefix'.
1199 def wrap(paragraph, max_length, prefix=""):
1203 words = paragraph.split()
1206 word_size = len(word)
# Over-long words are flushed onto their own line rather than split.
1207 if word_size > max_length:
1209 s += line + '\n' + prefix
1210 s += word + '\n' + prefix
1213 new_length = len(line) + word_size + 1
1214 if new_length > max_length:
1215 s += line + '\n' + prefix
1228 ################################################################################
1230 # Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
1231 # Returns fixed 'src'
def clean_symlink(src, dest, root):
    """Relativize the absolute symlink 'src' -> 'dest' against 'root'.

    Strips the first occurrence of 'root' from both paths, then climbs
    out of dest's directory with one '../' per path component before
    descending into the root-relative src. Returns the fixed src.
    """
    stripped_src = src.replace(root, '', 1)
    target_dir = os.path.dirname(dest.replace(root, '', 1))
    ups = '../' * len(target_dir.split('/'))
    return ups + stripped_src
1239 ################################################################################
1241 def temp_filename(directory=None, dotprefix=None, perms=0700):
1242 """Return a secure and unique filename by pre-creating it.
1243 If 'directory' is non-null, it will be the directory the file is pre-created in.
1244 If 'dotprefix' is non-null, the filename will be prefixed with a '.'."""
# tempfile.tempdir is process-global, so it is saved and restored around
# the mktemp() call.
1247 old_tempdir = tempfile.tempdir
1248 tempfile.tempdir = directory
1250 filename = tempfile.mktemp()
1253 filename = "%s/.%s" % (os.path.dirname(filename), os.path.basename(filename))
# O_EXCL pre-creation closes the classic mktemp() race.
1254 fd = os.open(filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, perms)
1258 tempfile.tempdir = old_tempdir
1262 ################################################################################
1264 # checks if the user part of the email is listed in the alias file
# True if the local part of 'email' appears in the host's forward-alias
# file; the file is read once and cached in module-level alias_cache.
1266 def is_email_alias(email):
1268 if alias_cache == None:
1269 aliasfn = which_alias_file()
1272 for l in open(aliasfn):
1273 alias_cache.add(l.split(':')[0])
1274 uid = email.split('@')[0]
1275 return uid in alias_cache
1277 ################################################################################
# Module import-time side effect: load the global apt-style configuration
# into Cnf, layering the host-specific config over the default one.
1281 Cnf = apt_pkg.newConfiguration()
1282 apt_pkg.ReadConfigFileISC(Cnf,default_config)
1284 if which_conf_file() != default_config:
1285 apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1287 ################################################################################