4 # Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
6 ################################################################################
8 # This program is free software; you can redistribute it and/or modify
9 # it under the terms of the GNU General Public License as published by
10 # the Free Software Foundation; either version 2 of the License, or
11 # (at your option) any later version.
13 # This program is distributed in the hope that it will be useful,
14 # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 # GNU General Public License for more details.
18 # You should have received a copy of the GNU General Public License
19 # along with this program; if not, write to the Free Software
20 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
22 ################################################################################
24 import codecs, commands, email.Header, os, pwd, re, select, socket, shutil, \
25 sys, tempfile, traceback, stat
28 from dak_exceptions import *
30 ################################################################################
# Precompiled regular expressions used throughout dak.
re_comments = re.compile(r"\#.*")                    # trailing '#' comments
re_no_epoch = re.compile(r"^\d+\:")                  # leading "epoch:" of a version
re_no_revision = re.compile(r"-[^-]+$")              # trailing "-debianrevision"
re_arch_from_filename = re.compile(r"/binary-[^/]+/")
re_extract_src_version = re.compile (r"(\S+)\s*\((.*)\)")
re_isadeb = re.compile (r"(.+?)_(.+?)_(.+)\.u?deb$") # package_version_arch.[u]deb
re_issource = re.compile (r"(.+)_(.+?)\.(orig\.tar\.gz|diff\.gz|tar\.gz|dsc)$")

# RFC822-style control file fields
re_single_line_field = re.compile(r"^(\S*)\s*:\s*(.*)")
re_multi_line_field = re.compile(r"^\s(.*)")
re_taint_free = re.compile(r"^[-+~/\.\w]+$")         # shell-safe filenames only

re_parse_maintainer = re.compile(r"^\s*(\S.*\S)\s*\<([^\>]+)\>")
re_gpg_uid = re.compile('^uid.*<([^>]*)>')

# "source (version)" as found in e.g. a Source: field.
# (This was previously compiled twice; the duplicate definition was removed.)
re_srchasver = re.compile(r"^(\S+)\s+\((\S+)\)$")
re_verwithext = re.compile(r"^(\d+)(?:\.(\d+))(?:\s+\((\S+)\))?$")
# NOTE(review): lines in this region carry stray numeric prefixes and some
# appear to have been lost in transit -- verify against upstream before use.
# Fallback configuration paths, used when no host-specific config is found.
52 default_config = "/etc/dak/dak.conf"
53 default_apt_config = "/etc/dak/apt.conf"
# Cache for gpg_get_key_addresses(): fingerprint -> addresses found in key uids.
56 key_uid_email_cache = {}
58 # (hashname, function, earliest_changes_version)
# Checksum algorithms understood by the hash-checking helpers below; the
# version tuple is presumably the earliest changes-format carrying the
# corresponding Checksums-* field -- TODO confirm against upstream.
59 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
60 ("sha256", apt_pkg.sha256sum, (1, 8))]
62 ################################################################################
def open_file(filename, mode='r'):
    """Open `filename` in `mode`, raising CantOpenError (instead of
    IOError) if it cannot be opened."""
    # The try/except wrapper had been lost from this copy, leaving the
    # raise unconditional; restored so the open is actually attempted.
    try:
        f = open(filename, mode)
    except IOError:
        raise CantOpenError(filename)
    return f
71 ################################################################################
def our_raw_input(prompt=""):
    """Prompt for and return a line of input; on EOF (^D) print a note
    to stderr and exit."""
    if prompt:
        sys.stdout.write(prompt)
    sys.stdout.flush()
    try:
        # raw_input: this module is Python 2
        ret = raw_input()
        return ret
    except EOFError:
        sys.stderr.write("\nUser interrupt (^D).\n")
        raise SystemExit
84 ################################################################################
def extract_component_from_section(section):
    """Split a Section field into (section, component).  A "comp/sect"
    section yields component "comp"; otherwise the component is the
    section itself if configured under Component::, else "main"."""
    component = ""

    if section.find('/') != -1:
        component = section.split('/')[0]

    # Expand default component
    if component == "":
        if Cnf.has_key("Component::%s" % section):
            component = section
        else:
            component = "main"

    return (section, component)
101 ################################################################################
def parse_changes(filename, signing_rules=0):
    """Parses a changes file and returns a dictionary where each field is a
    key.  The mandatory first argument is the filename of the .changes
    file.

    signing_rules is an optional argument:

      o If signing_rules == -1, no signature is required.
      o If signing_rules == 0 (the default), a signature is required.
      o If signing_rules == 1, it turns on the same strict format checking
        as dpkg-source.

    The rules for (signing_rules == 1)-mode are:

      o The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
        followed by any PGP header data and must end with a blank line.

      o The data section must end with a blank line and must be followed by
        "-----BEGIN PGP SIGNATURE-----".

    Raises ParseChangesError / InvalidDscError on malformed input.
    """
    error = ""
    changes = {}

    changes_in = open_file(filename)
    lines = changes_in.readlines()

    if not lines:
        raise ParseChangesError("[Empty changes file]")

    # Reindex by line number so we can easily verify the format of
    # .dsc files...
    index = 0
    indexed_lines = {}
    for line in lines:
        index += 1
        indexed_lines[index] = line[:-1]

    inside_signature = 0

    num_of_lines = len(indexed_lines.keys())
    index = 0
    first = -1
    while index < num_of_lines:
        index += 1
        line = indexed_lines[index]
        if line == "":
            if signing_rules == 1:
                # Strict mode: a blank line must be immediately followed
                # by the signature block.
                index += 1
                if index > num_of_lines:
                    raise InvalidDscError(index)
                line = indexed_lines[index]
                if not line.startswith("-----BEGIN PGP SIGNATURE"):
                    raise InvalidDscError(index)
                inside_signature = 0
                break
            else:
                continue
        if line.startswith("-----BEGIN PGP SIGNATURE"):
            break
        if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
            inside_signature = 1
            if signing_rules == 1:
                # Skip the PGP header data up to the blank line.
                while index < num_of_lines and line != "":
                    index += 1
                    line = indexed_lines[index]
            continue
        # If we're not inside the signed data, don't process anything
        if signing_rules >= 0 and not inside_signature:
            continue
        slf = re_single_line_field.match(line)
        if slf:
            field = slf.groups()[0].lower()
            changes[field] = slf.groups()[1]
            first = 1
            continue
        if line == " .":
            # RFC822 blank-line placeholder in a multi-line field
            changes[field] += '\n'
            continue
        mlf = re_multi_line_field.match(line)
        if mlf:
            if first == -1:
                raise ParseChangesError("'%s'\n [Multi-line field continuing on from nothing?]" % (line))
            if first == 1 and changes[field] != "":
                changes[field] += '\n'
            first = 0
            changes[field] += mlf.groups()[0] + '\n'
            continue
        error += line

    if signing_rules == 1 and inside_signature:
        raise InvalidDscError(index)

    changes_in.close()
    changes["filecontents"] = "".join(lines)

    if "source" in changes:
        # Strip the source version in brackets from the source field,
        # put it in the "source-version" field instead.
        srcver = re_srchasver.search(changes["source"])
        if srcver:
            changes["source"] = srcver.group(1)
            changes["source-version"] = srcver.group(2)

    if error:
        raise ParseChangesError(error)

    return changes
212 ################################################################################
def create_hash (lfiles, key, testfn, basedict = None):
    """Run checksum function `testfn` over every file in `lfiles`, storing
    the result as basedict[f]['<key>sum'].  Returns a list of reject
    messages for files that could not be opened."""
    rejmsg = []
    for f in lfiles.keys():
        try:
            file_handle = open_file(f)
        except CantOpenError:
            rejmsg.append("Could not open file %s for checksumming" % (f))
            # file_handle is unbound here; skip rather than crash below
            continue

        # Compute and store the hash
        if basedict and f in basedict:
            basedict[f]['%ssum' % key] = testfn(file_handle)
        file_handle.close()

    return rejmsg
229 ################################################################################
def check_hash (where, lfiles, key, testfn, basedict = None):
    """Verify the `key` checksum and size of every file in `lfiles`
    against the recorded values; `where` names the source of the data
    for error messages.  Returns a list of reject messages."""
    rejmsg = []
    if basedict:
        for f in basedict.keys():
            if f not in lfiles:
                rejmsg.append("%s: no %s checksum" % (f, key))

    for f in lfiles.keys():
        if basedict and f not in basedict:
            rejmsg.append("%s: extraneous entry in %s checksums" % (f, key))

        try:
            file_handle = open_file(f)
        except CantOpenError:
            continue

        # Check hash
        if testfn(file_handle) != lfiles[f][key]:
            rejmsg.append("%s: %s check failed." % (f, key))
        file_handle.close()
        # Store the hashes for later use
        if basedict:
            basedict[f]['%ssum' % key] = lfiles[f][key]
        # Check size
        actual_size = os.stat(f)[stat.ST_SIZE]
        size = int(lfiles[f]["size"])
        if size != actual_size:
            rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
                          % (f, actual_size, size, where))

    return rejmsg
263 ################################################################################
# ensure_hashes(Upload): validate and/or compute per-algorithm checksums for
# the files listed in an upload's .changes and .dsc, collecting reject
# messages.
# NOTE(review): many interleaved lines are missing from this copy (rejmsg
# initialisation, try: statements, rejmsg.append calls inside the loops, the
# final return) and every line carries a stray numeric prefix -- restore from
# upstream before editing.
265 def ensure_hashes(Upload):
# Reject any Checksums-* field naming an algorithm we do not support.
267 for x in Upload.changes:
268 if x.startswith("checksum-"):
269 h = x.split("-",1)[1]
270 if h not in dict(known_hashes):
# NOTE(review): this format string has no %s placeholder, so the
# % (h) application would raise TypeError -- looks like a bug.
271 rejmsg.append("Unsupported checksum field in .changes" % (h))
# Same check against the .dsc fields (the enclosing loop line is elided).
274 if x.startswith("checksum-"):
275 h = x.split("-",1)[1]
276 if h not in dict(known_hashes):
# NOTE(review): same missing-placeholder bug as above.
277 rejmsg.append("Unsupported checksum field in .dsc" % (h))
279 # We have to calculate the hash if we have an earlier changes version than
280 # the hash appears in rather than require it exist in the changes file
281 # I hate backwards compatibility
# For each known algorithm: build the file list, create/check hashes for the
# .changes, then (for source uploads) for the .dsc as well.
282 for h,f,v in known_hashes:
284 fs = build_file_list(Upload.changes, 0, "checksums-%s" % h, h)
286 for m in create_hash(fs, h, f, Upload.files):
289 for m in check_hash(".changes %s" % (h), fs, h, f, Upload.files):
291 except NoFilesFieldError:
292 rejmsg.append("No Checksums-%s: field in .changes" % (h))
293 except UnknownFormatError, format:
294 rejmsg.append("%s: unknown format of .changes" % (format))
295 except ParseChangesError, line:
296 rejmsg.append("parse error for Checksums-%s in .changes, can't grok: %s." % (h, line))
# Binary-only uploads have no .dsc to check.
298 if "source" not in Upload.changes["architecture"]: continue
301 fs = build_file_list(Upload.dsc, 1, "checksums-%s" % h, h)
303 for m in create_hash(fs, h, f, Upload.dsc_files):
306 for m in check_hash(".dsc %s" % (h), fs, h, f, Upload.dsc_files):
308 except UnknownFormatError, format:
309 rejmsg.append("%s: unknown format of .dsc" % (format))
310 except NoFilesFieldError:
311 rejmsg.append("No Checksums-%s: field in .dsc" % (h))
312 except ParseChangesError, line:
313 rejmsg.append("parse error for Checksums-%s in .dsc, can't grok: %s." % (h, line))
317 ################################################################################
319 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
    """Parse the Files: (or Checksums-*) field of a parsed changes/dsc
    dictionary into {name: {size, section, priority, component, <hashname>}}.
    Raises NoFilesFieldError, UnknownFormatError or ParseChangesError."""
    files = {}

    # Make sure we have a Files: field to parse...
    if field not in changes:
        raise NoFilesFieldError

    # Make sure we recognise the format of the Files: field
    format = re_verwithext.search(changes.get("format", "0.0"))
    if not format:
        raise UnknownFormatError("%s" % (changes.get("format", "0.0")))

    format = format.groups()
    if format[1] == None:
        format = int(float(format[0])), 0, format[2]
    else:
        format = int(format[0]), int(format[1]), format[2]
    if format[2] == None:
        format = format[:2]

    if is_a_dsc:
        if format != (1, 0):
            raise UnknownFormatError("%s" % (changes.get("format", "0.0")))
    else:
        if (format < (1,5) or format > (1,8)):
            raise UnknownFormatError("%s" % (changes.get("format", "0.0")))
        if field != "files" and format < (1,8):
            raise UnknownFormatError("%s" % (changes.get("format", "0.0")))

    # Only the .changes Files: field carries section/priority columns
    includes_section = (not is_a_dsc) and field == "files"

    # Parse each entry/line:
    for i in changes[field].split('\n'):
        if not i:
            break
        s = i.split()
        section = priority = ""
        try:
            if includes_section:
                (md5, size, section, priority, name) = s
            else:
                (md5, size, name) = s
        except ValueError:
            raise ParseChangesError(i)

        if section == "":
            section = "-"
        if priority == "":
            priority = "-"

        (section, component) = extract_component_from_section(section)

        files[name] = Dict(size=size, section=section,
                           priority=priority, component=component)
        files[name][hashname] = md5

    return files
379 ################################################################################
def force_to_utf8(s):
    """Forces a string to UTF-8.  If the string isn't already UTF-8,
    it's assumed to be ISO-8859-1."""
    # Python 2: probe with unicode(); the UTF-8 branch had been lost
    # from this copy, which would have re-coded valid UTF-8 as latin-1.
    try:
        unicode(s, 'utf-8')
        return s
    except UnicodeError:
        latin1_s = unicode(s, 'iso8859-1')
        return latin1_s.encode('utf-8')
def rfc2047_encode(s):
    """Encodes a (header) string per RFC2047 if necessary.  If the
    string is neither ASCII nor UTF-8, it's assumed to be ISO-8859-1."""
    try:
        # Pure ASCII needs no encoding at all
        codecs.lookup('ascii')[1](s)
        return s
    except UnicodeError:
        try:
            # RFC2047-encode as UTF-8 if it decodes as such ...
            codecs.lookup('utf-8')[1](s)
            h = email.Header.Header(s, 'utf-8', 998)
            return str(h)
        except UnicodeError:
            # ... otherwise fall back to ISO-8859-1
            h = email.Header.Header(s, 'iso-8859-1', 998)
            return str(h)
407 ################################################################################
409 # <Culus> 'The standard sucks, but my tool is supposed to interoperate
410 # with it. I know - I'll fix the suckage and make things
def fix_maintainer (maintainer):
    """Parses a Maintainer or Changed-By field and returns:
    (1) an RFC822 compatible version,
    (2) an RFC2047 compatible version,
    (3) the name, and
    (4) the email.

    The name is forced to UTF-8 for both (1) and (3).  If the name field
    contains '.' or ',' (as allowed by Debian policy), (1) and (2) are
    switched to 'email (name)' format.  Raises ParseMaintError on
    unparseable input."""
    maintainer = maintainer.strip()
    if not maintainer:
        return ('', '', '', '')

    if maintainer.find("<") == -1:
        # Bare address, no name
        email = maintainer
        name = ""
    elif (maintainer[0] == "<" and maintainer[-1:] == ">"):
        email = maintainer[1:-1]
        name = ""
    else:
        m = re_parse_maintainer.match(maintainer)
        if not m:
            raise ParseMaintError("Doesn't parse as a valid Maintainer field.")
        name = m.group(1)
        email = m.group(2)

    # Get an RFC2047 compliant version of the name
    rfc2047_name = rfc2047_encode(name)

    # Force the name to be UTF-8
    name = force_to_utf8(name)

    if name.find(',') != -1 or name.find('.') != -1:
        # '.'/',' in the display name would be misparsed; use
        # "email (name)" form instead (allowed by policy)
        rfc822_maint = "%s (%s)" % (email, name)
        rfc2047_maint = "%s (%s)" % (email, rfc2047_name)
    else:
        rfc822_maint = "%s <%s>" % (name, email)
        rfc2047_maint = "%s <%s>" % (rfc2047_name, email)

    # buildd_* pseudo-addresses are exempt from the '@' sanity check
    if email.find("@") == -1 and email.find("buildd_") != 0:
        raise ParseMaintError("No @ found in email address part.")

    return (rfc822_maint, rfc2047_maint, name, email)
458 ################################################################################
460 # sendmail wrapper, takes _either_ a message string or a file as arguments
# sendmail wrapper, takes _either_ a message string or a file as arguments
def send_mail (message, filename=""):
    """Pipe `message` (or the existing file `filename`) into
    Dinstall::SendmailCommand; raises SendmailFailedError on failure."""
    # If we've been passed a string dump it into a temporary file
    if message:
        filename = tempfile.mktemp()
        # O_EXCL guards against the mktemp() name-reuse race
        fd = os.open(filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0o700)
        os.write (fd, message)
        os.close (fd)

    # Invoke sendmail
    (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
    if (result != 0):
        raise SendmailFailedError(output)

    # Clean up any temporary files
    if message:
        os.unlink (filename)
478 ################################################################################
def poolify (source, component):
    """Return the pool subdirectory for `source` within `component`:
    "lib*" packages go under their first four letters, everything else
    under its first letter (e.g. "libfoo" -> "main/libf/libfoo/")."""
    # Without this separator the component would run into the prefix
    # ("mainlibf/..."), which the visible copy of this code did.
    if component:
        component += '/'
    if source[:3] == "lib":
        return component + source[:4] + '/' + source + '/'
    else:
        return component + source[:1] + '/' + source + '/'
488 ################################################################################
def move (src, dest, overwrite = 0, perms = 0o664):
    """Move `src` to `dest` (file or directory), creating intermediate
    directories as needed, setting `perms` on the result.  Refuses to
    overwrite an existing file unless `overwrite` is set."""
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = dest
    else:
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        umask = os.umask(0)
        os.makedirs(dest_dir, 0o2775)
        os.umask(umask)
    #print "Moving %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
        if not overwrite:
            fubar("Can't move %s to %s - file already exists." % (src, dest))
        else:
            if not os.access(dest, os.W_OK):
                fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
    # Remove the source -- without this it's a copy, not a move
    os.unlink(src)
def copy (src, dest, overwrite = 0, perms = 0o664):
    """Copy `src` to `dest` (file or directory), creating intermediate
    directories as needed, setting `perms` on the result.  Raises
    FileExistsError / CantOverwriteError instead of overwriting unless
    `overwrite` is set."""
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = dest
    else:
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        umask = os.umask(0)
        os.makedirs(dest_dir, 0o2775)
        os.umask(umask)
    #print "Copying %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
        if not overwrite:
            raise FileExistsError
        else:
            if not os.access(dest, os.W_OK):
                raise CantOverwriteError
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
535 ################################################################################
def where_am_i ():
    """Return this host's configured Config::<host>::DatabaseHostname,
    falling back to the host's FQDN."""
    # NOTE(review): the def line was missing from this copy; name and
    # fallback return reconstructed -- confirm against upstream.
    res = socket.gethostbyaddr(socket.gethostname())
    database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
    if database_hostname:
        return database_hostname
    else:
        return res[0]
def which_conf_file ():
    """Return the per-host dak config path if one is configured,
    otherwise default_config."""
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    host_conf = Cnf.get("Config::" + hostname + "::DakConfig")
    if host_conf:
        return host_conf
    return default_config
def which_apt_conf_file ():
    """Return the per-host apt config path if one is configured,
    otherwise default_apt_config."""
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    host_conf = Cnf.get("Config::" + hostname + "::AptConfig")
    if host_conf:
        return host_conf
    return default_apt_config
def which_alias_file():
    """Return the path of this host's forward-alias file, or None if it
    does not exist."""
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    aliasfn = '/var/lib/misc/' + hostname + '/forward-alias'
    if os.path.exists(aliasfn):
        return aliasfn
    else:
        return None
567 ################################################################################
# Escape characters which have meaning to SQL's regex comparison operator ('~')
# (woefully incomplete)
def regex_safe (s):
    """Escape `s` for use inside a SQL regular expression (only '+' and
    '.' are handled)."""
    # NOTE(review): the def line and return were missing from this copy;
    # reconstructed -- confirm the name against upstream.
    s = s.replace('+', '\\\\+')
    s = s.replace('.', '\\\\.')
    return s
577 ################################################################################
# Perform a substitution of template
def TemplateSubst(map, filename):
    """Return the contents of `filename` with every occurrence of each
    key of `map` replaced by its value."""
    file = open_file(filename)
    template = file.read()
    for x in map.keys():
        template = template.replace(x, map[x])
    file.close()
    return template
588 ################################################################################
def fubar(msg, exit_code=1):
    """Print an error message to stderr and terminate with `exit_code`."""
    sys.stderr.write("E: %s\n" % (msg))
    sys.exit(exit_code)

def warn(msg):
    """Print a warning message to stderr."""
    sys.stderr.write("W: %s\n" % (msg))
597 ################################################################################
# Returns the user name with a laughable attempt at rfc822 conformancy
# (read: removing stray periods).
def whoami ():
    """Return the invoking user's real name (GECOS field), with '.'
    removed."""
    # NOTE(review): the def line was missing from this copy; reconstructed.
    return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
604 ################################################################################
614 return ("%d%s" % (c, t))
616 ################################################################################
def cc_fix_changes (changes):
    """Convert the space-separated "architecture" string of a parsed
    changes dict into a {arch: 1} membership dictionary, in place."""
    o = changes.get("architecture", "")
    if o != "":
        del changes["architecture"]
    changes["architecture"] = {}
    for j in o.split():
        changes["architecture"][j] = 1
626 # Sort by source name, source version, 'have source', and then by filename
# cmp()-style comparator for two .changes *filenames*: parses both and
# orders by source name, then version, then presence of source, then name.
# NOTE(review): the try/except wrappers around parse_changes, the early
# returns, and the final cmp-by-filename return are missing from this copy;
# `cmp` and apt_pkg.VersionCompare are Python-2/apt_pkg era APIs.
627 def changes_compare (a, b):
629 a_changes = parse_changes(a)
634 b_changes = parse_changes(b)
# Normalise the architecture fields into dicts before comparing.
638 cc_fix_changes (a_changes)
639 cc_fix_changes (b_changes)
641 # Sort by source name
642 a_source = a_changes.get("source")
643 b_source = b_changes.get("source")
644 q = cmp (a_source, b_source)
648 # Sort by source version
649 a_version = a_changes.get("version", "0")
650 b_version = b_changes.get("version", "0")
651 q = apt_pkg.VersionCompare(a_version, b_version)
655 # Sort by 'have source'
656 a_has_source = a_changes["architecture"].get("source")
657 b_has_source = b_changes["architecture"].get("source")
# Uploads that include source sort after binary-only ones (returns elided).
658 if a_has_source and not b_has_source:
660 elif b_has_source and not a_has_source:
663 # Fall back to sort by filename
666 ################################################################################
def find_next_free (dest, too_many=100):
    """Return `dest` if it does not exist, otherwise the first free
    "dest.N" variant; raises NoFreeFilenameError after `too_many`
    attempts."""
    extra = 0
    orig_dest = dest
    while os.path.exists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
        extra += 1
    if extra >= too_many:
        raise NoFreeFilenameError
    return dest
678 ################################################################################
def result_join (original, sep = '\t'):
    """Join the items of `original` with `sep`, rendering None entries
    as empty strings."""
    # Rewritten to iterate items directly: the original indexed with
    # xrange() and shadowed the builtin name `list`.
    parts = []
    for item in original:
        if item is None:
            parts.append("")
        else:
            parts.append(item)
    return sep.join(parts)
689 ################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    """Prefix every (stripped) line of `str` with `prefix`, dropping
    blank lines unless `include_blank_lines` is set; no trailing
    newline on the result."""
    out = ""
    for line in str.split('\n'):
        line = line.strip()
        if line or include_blank_lines:
            out += "%s%s\n" % (prefix, line)
    # Strip trailing new line
    if out:
        out = out[:-1]
    return out
702 ################################################################################
def validate_changes_file_arg(filename, require_changes=1):
    """'filename' is either a .changes or .dak file.  If 'filename' is a
    .dak file, it's changed to be the corresponding .changes file.  The
    function then checks if the .changes file a) exists and b) is
    readable and returns the .changes filename if so.  If there's a
    problem, the next action depends on the option 'require_changes'
    argument:

      o If 'require_changes' == -1, errors are ignored and the .changes
        filename is returned.
      o If 'require_changes' == 0, a warning is given and 'None' is returned.
      o If 'require_changes' == 1, a fatal error is raised.
    """
    error = None

    orig_filename = filename
    if filename.endswith(".dak"):
        filename = filename[:-4] + ".changes"

    if not filename.endswith(".changes"):
        error = "invalid file type; not a changes file"
    else:
        if not os.access(filename, os.R_OK):
            if os.path.exists(filename):
                error = "permission denied"
            else:
                error = "file not found"

    if error:
        if require_changes == 1:
            fubar("%s: %s." % (orig_filename, error))
        elif require_changes == 0:
            warn("Skipping %s - %s" % (orig_filename, error))
            return None
        else: # We only care about the .dak file
            return filename
    else:
        return filename
743 ################################################################################
746 return (arch != "source" and arch != "all")
748 ################################################################################
def join_with_commas_and(list):
    """Render a sequence as English prose: [] -> "nothing",
    ["a"] -> "a", ["a","b","c"] -> "a, b and c"."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    head = ", ".join(list[:-1])
    return "%s and %s" % (head, list[-1])
755 ################################################################################
def pp_deps (deps):
    """Pretty-print a list of (package, version, constraint) dependency
    atoms, joined with " |" (alternatives)."""
    # NOTE(review): the def line and branch structure were missing from
    # this copy; reconstructed -- confirm against upstream.
    pretty = []
    for atom in deps:
        (pkg, version, constraint) = atom
        if constraint:
            pp_dep = "%s (%s %s)" % (pkg, constraint, version)
        else:
            pp_dep = pkg
        pretty.append(pp_dep)
    return " |".join(pretty)
768 ################################################################################
773 ################################################################################
775 # Handle -a, -c and -s arguments; returns them as SQL constraints
# parse_args(Options) -> (con_suites, con_architectures, con_components,
# check_source): turns the -s/-c/-a command-line options into "AND ... IN"
# SQL fragments via database id lookups.
# NOTE(review): several lines are missing from this copy (the con_* and
# *_ids_list initialisations, the Options["Suite"] guard, continue
# statements, and the check_source handling for "source") -- restore from
# upstream before editing.
776 def parse_args(Options):
# Process suite: look each name up, warn on unknown, collect ids.
780 for suite in split_args(Options["Suite"]):
781 suite_id = database.get_suite_id(suite)
783 warn("suite '%s' not recognised." % (suite))
785 suite_ids_list.append(suite_id)
787 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
789 fubar("No valid suite given.")
# Process component: same pattern as suites.
794 if Options["Component"]:
795 component_ids_list = []
796 for component in split_args(Options["Component"]):
797 component_id = database.get_component_id(component)
798 if component_id == -1:
799 warn("component '%s' not recognised." % (component))
801 component_ids_list.append(component_id)
802 if component_ids_list:
803 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
805 fubar("No valid component given.")
809 # Process architecture
810 con_architectures = ""
811 if Options["Architecture"]:
814 for architecture in split_args(Options["Architecture"]):
# "source" is handled separately (presumably sets check_source -- the
# assignment line is elided here).
815 if architecture == "source":
818 architecture_id = database.get_architecture_id(architecture)
819 if architecture_id == -1:
820 warn("architecture '%s' not recognised." % (architecture))
822 arch_ids_list.append(architecture_id)
824 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
827 fubar("No valid architecture given.")
831 return (con_suites, con_architectures, con_components, check_source)
833 ################################################################################
835 # Inspired(tm) by Bryn Keller's print_exc_plus (See
836 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
# Extended traceback printer: walks to the innermost traceback frame, then
# prints each stack frame and its local variables (Python 2 print syntax).
# NOTE(review): the def line, the tb/frame walking loops and the stack list
# handling are missing from this copy -- restore from upstream before use.
839 tb = sys.exc_info()[2]
848 traceback.print_exc()
850 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
851 frame.f_code.co_filename,
# Print each local; some objects raise from __repr__/__str__, hence the
# "<unable to print>" fallback below.
853 for key, value in frame.f_locals.items():
854 print "\t%20s = " % key,
858 print "<unable to print>"
860 ################################################################################
862 def try_with_debug(function):
870 ################################################################################
872 # Function for use in sorting lists of architectures.
873 # Sorts normally except that 'source' dominates all others.
def arch_compare_sw (a, b):
    """cmp()-style comparator for architecture strings in which
    "source" sorts before every other architecture."""
    if a == "source" and b == "source":
        return 0
    elif a == "source":
        return -1
    elif b == "source":
        return 1
    # Plain string comparison otherwise (Python 2 cmp)
    return cmp (a, b)
885 ################################################################################
887 # Split command line arguments which can be separated by either commas
888 # or whitespace. If dwim is set, it will complain about string ending
889 # in comma since this usually means someone did 'dak ls -a i386, m68k
890 # foo' or something and the inevitable confusion resulting from 'm68k'
891 # being treated as an argument is undesirable.
def split_args (s, dwim=1):
    """Split `s` on commas if any are present, otherwise on whitespace.
    With `dwim`, a trailing comma (usually a stray space after a comma)
    is treated as a fatal usage error."""
    if s.find(",") == -1:
        return s.split()
    else:
        if s[-1:] == "," and dwim:
            fubar("split_args: found trailing comma, spurious space maybe?")
        return s.split(",")
901 ################################################################################
903 def Dict(**dict): return dict
905 ########################################
907 # Our very own version of commands.getouputstatus(), hacked to support
# ... gpgv's status fd.  Forks /bin/sh -c <cmd>, wires up stdout/stderr and
# the caller-supplied status pipe, then select()s over the three fds,
# returning (output, status, exit_status).
# NOTE(review): many lines are missing from this copy (the fork, the child's
# dup2/close sequence, the read-accumulation branches, the pipe close-down)
# and the statement order here is load-bearing -- restore from upstream
# rather than reconstructing.
909 def gpgv_get_status_output(cmd, status_read, status_write):
910 cmd = ['/bin/sh', '-c', cmd]
911 p2cread, p2cwrite = os.pipe()
912 c2pread, c2pwrite = os.pipe()
913 errout, errin = os.pipe()
# Child: close every inherited fd except the status_write fd gpgv needs.
923 for i in range(3, 256):
924 if i != status_write:
930 os.execvp(cmd[0], cmd)
# Parent: alias the child's ends so the select loop below can poll them.
936 os.dup2(c2pread, c2pwrite)
937 os.dup2(errout, errin)
# Accumulate child stdout/stderr and status-fd data until EOF on all.
941 i, o, e = select.select([c2pwrite, errin, status_read], [], [])
944 r = os.read(fd, 8196)
947 if fd == c2pwrite or fd == errin:
949 elif fd == status_read:
952 fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
954 pid, exit_status = os.waitpid(pid, 0)
956 os.close(status_write)
957 os.close(status_read)
967 return output, status, exit_status
969 ################################################################################
def process_gpgv_output(status):
    """Parse gpgv --status-fd output into ({keyword: args}, error_string).
    Malformed lines and unexpected duplicate keywords are reported in
    the error string rather than raised."""
    # Process the status-fd output
    keywords = {}
    internal_error = ""
    for line in status.split('\n'):
        line = line.strip()
        if line == "":
            continue
        split = line.split()
        if len(split) < 2:
            internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
            continue
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
            continue
        args = split[2:]
        # These keywords may legitimately repeat (per signature)
        if keyword in keywords and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
            continue
        else:
            keywords[keyword] = args

    return (keywords, internal_error)
996 ################################################################################
998 def retrieve_key (filename, keyserver=None, keyring=None):
999 """Retrieve the key that signed 'filename' from 'keyserver' and
1000 add it to 'keyring'. Returns nothing on success, or an error message
# NOTE(review): lines are missing from this copy (docstring close, the
# "if not keyserver/keyring:" guards, the internal_error check, the
# result != 0 test and the final return) -- restore from upstream.
1003 # Defaults for keyserver and keyring
1005 keyserver = Cnf["Dinstall::KeyServer"]
1007 keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
1009 # Ensure the filename contains no shell meta-characters or other badness
# Required because `filename` is interpolated into a shell command below.
1010 if not re_taint_free.match(filename):
1011 return "%s: tainted filename" % (filename)
1013 # Invoke gpgv on the file
1014 status_read, status_write = os.pipe();
# /dev/null keyring: we *expect* NO_PUBKEY, and harvest the fingerprint.
1015 cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1016 (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1018 # Process the status-fd output
1019 (keywords, internal_error) = process_gpgv_output(status)
1021 return internal_error
1023 if not keywords.has_key("NO_PUBKEY"):
1024 return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1026 fingerprint = keywords["NO_PUBKEY"][0]
1027 # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
1028 # it'll try to create a lockfile in /dev. A better solution might
1029 # be a tempfile or something.
1030 cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1031 % (Cnf["Dinstall::SigningKeyring"])
1032 cmd += " --keyring %s --keyserver %s --recv-key %s" \
1033 % (keyring, keyserver, fingerprint)
1034 (result, output) = commands.getstatusoutput(cmd)
1036 return "'%s' failed with exit code %s" % (cmd, result)
1040 ################################################################################
def gpg_keyring_args(keyrings=None):
    """Return a "--keyring X --keyring Y ..." argument string for the
    given keyrings, defaulting to Dinstall::GPGKeyring."""
    if not keyrings:
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    return " ".join(["--keyring %s" % x for x in keyrings])
1048 ################################################################################
1050 def check_signature (sig_filename, reject, data_filename="", keyrings=None, autofetch=None):
1051 """Check the signature of a file and return the fingerprint if the
1052 signature is valid or 'None' if it's not. The first argument is the
1053 filename whose signature should be checked. The second argument is a
1054 reject function and is called when an error is found. The reject()
1055 function must allow for two arguments: the first is the error message,
1056 the second is an optional prefix string. It's possible for reject()
1057 to be called more than once during an invocation of check_signature().
1058 The third argument is optional and is the name of the files the
1059 detached signature applies to. The fourth argument is optional and is
1060 a *list* of keyrings to use. 'autofetch' can either be None, True or
1061 False. If None, the default behaviour specified in the config will be
# NOTE(review): throughout this function the "return None" lines after the
# reject() calls, several if-guards, and the final "return fingerprint" are
# missing from this copy -- restore from upstream before editing.
1064 # Ensure the filename contains no shell meta-characters or other badness
# Required because both filenames are interpolated into a shell command.
1065 if not re_taint_free.match(sig_filename):
1066 reject("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
1069 if data_filename and not re_taint_free.match(data_filename):
1070 reject("!!WARNING!! tainted data filename: '%s'." % (data_filename))
# Default keyrings from config when none are passed in.
1074 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1076 # Autofetch the signing key if that's enabled
1077 if autofetch == None:
1078 autofetch = Cnf.get("Dinstall::KeyAutoFetch")
1080 error_msg = retrieve_key(sig_filename)
1085 # Build the command line
1086 status_read, status_write = os.pipe();
1087 cmd = "gpgv --status-fd %s %s %s %s" % (
1088 status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)
1090 # Invoke gpgv on the file
1091 (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)
1093 # Process the status-fd output
1094 (keywords, internal_error) = process_gpgv_output(status)
1096 # If we failed to parse the status-fd output, let's just whine and bail now
1098 reject("internal error while performing signature check on %s." % (sig_filename))
1099 reject(internal_error, "")
1100 reject("Please report the above errors to the Archive maintainers by replying to this mail.", "")
1104 # Now check for obviously bad things in the processed output
1105 if keywords.has_key("KEYREVOKED"):
1106 reject("The key used to sign %s has been revoked." % (sig_filename))
1108 if keywords.has_key("BADSIG"):
1109 reject("bad signature on %s." % (sig_filename))
1111 if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
1112 reject("failed to check signature on %s." % (sig_filename))
1114 if keywords.has_key("NO_PUBKEY"):
1115 args = keywords["NO_PUBKEY"]
1118 reject("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
1120 if keywords.has_key("BADARMOR"):
1121 reject("ASCII armour of signature was corrupt in %s." % (sig_filename))
1123 if keywords.has_key("NODATA"):
1124 reject("no signature found in %s." % (sig_filename))
# An expired key is only fatal when there is no accompanying GOODSIG.
1126 if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
1127 args = keywords["KEYEXPIRED"]
1130 reject("The key (0x%s) used to sign %s has expired." % (key, sig_filename))
1136 # Next check gpgv exited with a zero return code
1138 reject("gpgv failed while checking %s." % (sig_filename))
1140 reject(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
1142 reject(prefix_multi_line_string(output, " [GPG output:] "), "")
1145 # Sanity check the good stuff we expect
1146 if not keywords.has_key("VALIDSIG"):
1147 reject("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
1150 args = keywords["VALIDSIG"]
1152 reject("internal error while checking signature on %s." % (sig_filename))
# First VALIDSIG argument is the signing key's fingerprint.
1155 fingerprint = args[0]
1156 if not keywords.has_key("GOODSIG"):
1157 reject("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
1159 if not keywords.has_key("SIG_ID"):
1160 reject("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))
1163 # Finally ensure there's not something we don't recognise
1164 known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
1165 SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
1166 NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")
1168 for keyword in keywords.keys():
1169 if not known_keywords.has_key(keyword):
1170 reject("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
1178 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """Retrieve email addresses from gpg key uids for a given fingerprint,
    caching results in key_uid_email_cache."""
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses != None:
        return addresses
    addresses = set()
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
          % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    if result == 0:
        for l in output.split('\n'):
            m = re_gpg_uid.match(l)
            if m:
                addresses.add(m.group(1))
    key_uid_email_cache[fingerprint] = addresses
    return addresses
1197 ################################################################################
1199 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """Greedy word-wrap `paragraph` to `max_length` columns, prefixing
    each continuation line with `prefix`.  Words longer than
    `max_length` are placed on lines of their own."""
    line = ""
    s = ""
    have_started = 0
    words = paragraph.split()

    for word in words:
        word_size = len(word)
        if word_size > max_length:
            # Oversized word: flush the current line, then the word itself
            if have_started:
                s += line + '\n' + prefix
            s += word + '\n' + prefix
            line = ""
        else:
            if have_started:
                new_length = len(line) + word_size + 1
                if new_length > max_length:
                    s += line + '\n' + prefix
                    line = word
                else:
                    line += ' ' + word
            else:
                line = word
        have_started = 1

    if have_started:
        s += line

    return s
1230 ################################################################################
1232 # Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
1233 # Returns fixed 'src'
def clean_symlink (src, dest, root):
    """Relativize the absolute symlink `src` -> `dest` with respect to
    `root`; returns the fixed `src`."""
    rel_src = src.replace(root, '', 1)
    rel_dest_dir = os.path.dirname(dest.replace(root, '', 1))
    depth = len(rel_dest_dir.split('/'))
    return ('../' * depth) + rel_src
1241 ################################################################################
def temp_filename(directory=None, dotprefix=None, perms=0o700):
    """Return a secure and unique filename by pre-creating it.
    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'dotprefix' is non-null, the filename will be prefixed with a '.'."""
    if directory:
        old_tempdir = tempfile.tempdir
        tempfile.tempdir = directory

    filename = tempfile.mktemp()

    if dotprefix:
        filename = "%s/.%s" % (os.path.dirname(filename), os.path.basename(filename))
    # Pre-create with O_EXCL so the mktemp() name cannot be raced
    fd = os.open(filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, perms)
    os.close(fd)

    if directory:
        tempfile.tempdir = old_tempdir

    return filename
1264 ################################################################################
1266 # checks if the user part of the email is listed in the alias file
def is_email_alias(email):
    """Check whether the local part of `email` is listed in the host's
    forward-alias file; the alias list is loaded lazily into the
    module-level alias_cache."""
    global alias_cache
    if alias_cache == None:
        aliasfn = which_alias_file()
        alias_cache = set()
        if aliasfn:
            # Alias file lines look like "name: target"; keep the name part
            for l in open(aliasfn):
                alias_cache.add(l.split(':')[0])
    uid = email.split('@')[0]
    return uid in alias_cache
1279 ################################################################################
# Module initialisation: build the global apt_pkg configuration from the
# default config file, then overlay any host-specific config.
# NOTE(review): the apt_pkg.init() call that normally precedes this appears
# to have been lost from this copy.
1283 Cnf = apt_pkg.newConfiguration()
1284 apt_pkg.ReadConfigFileISC(Cnf,default_config)
# Overlay host-specific config only if it differs from the default.
1286 if which_conf_file() != default_config:
1287 apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1289 ################################################################################