4 # Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
6 ################################################################################
8 # This program is free software; you can redistribute it and/or modify
9 # it under the terms of the GNU General Public License as published by
10 # the Free Software Foundation; either version 2 of the License, or
11 # (at your option) any later version.
13 # This program is distributed in the hope that it will be useful,
14 # but WITHOUT ANY WARRANTY; without even the implied warranty of
15 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 # GNU General Public License for more details.
18 # You should have received a copy of the GNU General Public License
19 # along with this program; if not, write to the Free Software
20 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
22 ################################################################################
24 import codecs, commands, email.Header, os, pwd, re, select, socket, shutil, \
25 sys, tempfile, traceback, stat
28 from dak_exceptions import *
30 ################################################################################
# Precompiled regexps used throughout dak.
re_comments = re.compile(r"\#.*")                # strip '#'-style comments
re_no_epoch = re.compile(r"^\d+\:")              # leading epoch ("1:") of a version
re_no_revision = re.compile(r"-[^-]+$")          # trailing Debian revision ("-1")
re_arch_from_filename = re.compile(r"/binary-[^/]+/")
re_extract_src_version = re.compile (r"(\S+)\s*\((.*)\)")
re_isadeb = re.compile (r"(.+?)_(.+?)_(.+)\.u?deb$")   # package_version_arch.[u]deb
re_issource = re.compile (r"(.+)_(.+?)\.(orig\.tar\.gz|diff\.gz|tar\.gz|dsc)$")

# RFC822-ish control-file fields: "Field: value" and continuation lines.
re_single_line_field = re.compile(r"^(\S*)\s*:\s*(.*)")
re_multi_line_field = re.compile(r"^\s(.*)")
re_taint_free = re.compile(r"^[-+~/\.\w]+$")     # filenames safe to pass to a shell

re_parse_maintainer = re.compile(r"^\s*(\S.*\S)\s*\<([^\>]+)\>")
re_gpg_uid = re.compile('^uid.*<([^>]*)>')

# "source (version)" as found in e.g. a Source: field.
# (A duplicate definition of re_srchasver was removed here.)
re_srchasver = re.compile(r"^(\S+)\s+\((\S+)\)$")
re_verwithext = re.compile(r"^(\d+)(?:\.(\d+))(?:\s+\((\S+)\))?$")

default_config = "/etc/dak/dak.conf"
default_apt_config = "/etc/dak/apt.conf"
# Cache mapping a key fingerprint -> set of uid email addresses; filled
# lazily by gpg_get_key_addresses() below.
key_uid_email_cache = {}
# (hashname, function, earliest_changes_version)
# Checksums dak knows how to verify; the version tuple is the first
# changes-file Format version in which the field may appear.
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
                ("sha256", apt_pkg.sha256sum, (1, 8))]
62 ################################################################################
def open_file(filename, mode='r'):
    """Open 'filename' in 'mode'; raises CantOpenError on failure.
    NOTE(review): the try/except wrapper and the 'return f' appear to be
    elided from this listing -- as shown the raise would be unconditional."""
    f = open(filename, mode)
    raise CantOpenError, filename
71 ################################################################################
def our_raw_input(prompt=""):
    """Prompt on stdout and read a line of input; on EOF (^D) report a
    user interrupt on stderr.  (The raw_input call and exit handling are
    elided from this listing.)"""
    sys.stdout.write(prompt)
    sys.stderr.write("\nUser interrupt (^D).\n")
84 ################################################################################
def extract_component_from_section(section):
    """Split a Section field like 'contrib/net' into (section, component).
    Sections without a '/' fall back to a default component looked up in
    the Cnf configuration (fallback branches elided in this listing)."""
    if section.find('/') != -1:
        component = section.split('/')[0]
    # Expand default component
    if Cnf.has_key("Component::%s" % section):
    return (section, component)
101 ################################################################################
def parse_changes(filename, signing_rules=0):
    """Parses a changes file and returns a dictionary where each field is a
    key.  The mandatory first argument is the filename of the .changes

    signing_rules is an optional argument:

    o If signing_rules == -1, no signature is required.
    o If signing_rules == 0 (the default), a signature is required.
    o If signing_rules == 1, it turns on the same strict format checking

    The rules for (signing_rules == 1)-mode are:

    o The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
      followed by any PGP header data and must end with a blank line.

    o The data section must end with a blank line and must be followed by
      "-----BEGIN PGP SIGNATURE-----".

    Raises ParseChangesError / InvalidDscError on malformed input.
    NOTE(review): several lines of this function are elided from this
    listing (loop setup, index increments, some branches)."""
    changes_in = open_file(filename)
    lines = changes_in.readlines()
        raise ParseChangesError, "[Empty changes file]"
    # Reindex by line number so we can easily verify the format of
        indexed_lines[index] = line[:-1]
    num_of_lines = len(indexed_lines.keys())
    while index < num_of_lines:
        line = indexed_lines[index]
        # In strict mode a signature *must* follow the signed data.
        if signing_rules == 1:
                if index > num_of_lines:
                    raise InvalidDscError, index
                line = indexed_lines[index]
                if not line.startswith("-----BEGIN PGP SIGNATURE"):
                    raise InvalidDscError, index
        if line.startswith("-----BEGIN PGP SIGNATURE"):
        if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
            # Skip the PGP header block up to the first blank line.
            if signing_rules == 1:
                while index < num_of_lines and line != "":
                    line = indexed_lines[index]
        # If we're not inside the signed data, don't process anything
        if signing_rules >= 0 and not inside_signature:
        slf = re_single_line_field.match(line)
            field = slf.groups()[0].lower()
            changes[field] = slf.groups()[1]
                changes[field] += '\n'
        mlf = re_multi_line_field.match(line)
                raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
            if first == 1 and changes[field] != "":
                changes[field] += '\n'
            changes[field] += mlf.groups()[0] + '\n'
    if signing_rules == 1 and inside_signature:
        raise InvalidDscError, index
    # Keep the raw file contents around for later re-use.
    changes["filecontents"] = "".join(lines)
    if changes.has_key("source"):
        # Strip the source version in brackets from the source field,
        # put it in the "source-version" field instead.
        srcver = re_srchasver.search(changes["source"])
            changes["source"] = srcver.group(1)
            changes["source-version"] = srcver.group(2)
        raise ParseChangesError, error
212 ################################################################################
# Compute the 'key' checksum of each file in 'lfiles' using hash
# function 'testfn', storing the result in basedict (when given).
# Returns a list of rejection messages for files that could not be read.
def create_hash (lfiles, key, testfn, basedict = None):
    for f in lfiles.keys():
            file_handle = open_file(f)
        except CantOpenError:
            rejmsg.append("Could not open file %s for checksumming" % (f))
            basedict[f]['%ssum' % key] = testfn(file_handle)
229 ################################################################################
# Verify the 'key' checksum and size of each file in 'lfiles' against
# the values recorded there; 'where' names the source of the checksums
# for error messages.  Returns a list of rejection messages.
def check_hash (where, lfiles, key, testfn, basedict = None):
    for f in basedict.keys():
            rejmsg.append("%s: no %s checksum" % (f, key))
    for f in lfiles.keys():
        if basedict and f not in basedict:
            rejmsg.append("%s: extraneous entry in %s checksums" % (f, key))
            file_handle = open_file(f)
        except CantOpenError:
        if testfn(file_handle) != lfiles[f][key]:
            rejmsg.append("%s: %s check failed." % (f, key))
        # Store the hashes for later use
            basedict[f]['%ssum' % key] = lfiles[f][key]
        # Also cross-check the on-disk size against the recorded one.
        actual_size = os.stat(f)[stat.ST_SIZE]
        size = int(lfiles[f]["size"])
        if size != actual_size:
            rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
                          % (f, actual_size, size, where))
263 ################################################################################
# Validate/compute all known checksums for a .changes and its .dsc,
# accumulating rejection messages.
def ensure_hashes(changes, dsc, files, dsc_files):
    # Make sure we recognise the format of the Files: field
    format = changes.get("format", "0.0").split(".",1)
        format = int(format[0]), int(format[1])
        format = int(float(format[0])), 0
        if x.startswith("checksum-"):
            h = x.split("-",1)[1]
            if h not in dict(known_hashes):
                # NOTE(review): format string has no %s placeholder --
                # "..." % (h) raises TypeError; should read
                # "Unsupported checksum field in .changes [%s]".
                rejmsg.append("Unsupported checksum field in .changes" % (h))
        if x.startswith("checksum-"):
            h = x.split("-",1)[1]
            if h not in dict(known_hashes):
                # NOTE(review): same missing-%s bug as above.
                rejmsg.append("Unsupported checksum field in .dsc" % (h))
    # We have to calculate the hash if we have an earlier changes version than
    # the hash appears in rather than require it exist in the changes file
    # I hate backwards compatibility
    for h,f,v in known_hashes:
            fs = build_file_list(changes, 0, "checksums-%s" % h, h)
                for m in create_hash(fs, h, f, files):
                for m in check_hash(".changes %s" % (h), fs, h, f, files):
        except NoFilesFieldError:
            rejmsg.append("No Checksums-%s: field in .changes" % (h))
        except UnknownFormatError, format:
            rejmsg.append("%s: unknown format of .changes" % (format))
        except ParseChangesError, line:
            rejmsg.append("parse error for Checksums-%s in .changes, can't grok: %s." % (h, line))
        # Binary-only uploads carry no .dsc to check.
        if "source" not in changes["architecture"]: continue
            fs = build_file_list(dsc, 1, "checksums-%s" % h, h)
                for m in create_hash(fs, h, f, dsc_files):
                for m in check_hash(".dsc %s" % (h), fs, h, f, dsc_files):
        except UnknownFormatError, format:
            rejmsg.append("%s: unknown format of .dsc" % (format))
        except NoFilesFieldError:
            rejmsg.append("No Checksums-%s: field in .dsc" % (h))
        except ParseChangesError, line:
            rejmsg.append("parse error for Checksums-%s in .dsc, can't grok: %s." % (h, line))
324 ################################################################################
# Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
# Parse a Files:/Checksums-* field from 'changes' into a dict keyed by
# filename, each entry holding size/section/priority/component plus the
# named hash.  Raises NoFilesFieldError / UnknownFormatError /
# ParseChangesError.
def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
    # Make sure we have a Files: field to parse...
    if not changes.has_key(field):
        raise NoFilesFieldError
    # Make sure we recognise the format of the Files: field
    format = re_verwithext.search(changes.get("format", "0.0"))
        raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
    format = format.groups()
    if format[1] == None:
        format = int(float(format[0])), 0, format[2]
        format = int(format[0]), int(format[1]), format[2]
    if format[2] == None:
        raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
    # Only Format versions 1.5 through 1.8 are accepted; checksum fields
    # require at least 1.8.
    if (format < (1,5) or format > (1,8)):
        raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
    if field != "files" and format < (1,8):
        raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
    includes_section = (not is_a_dsc) and field == "files"
    # Parse each entry/line:
    for i in changes[field].split('\n'):
        section = priority = ""
            (md5, size, section, priority, name) = s
            (md5, size, name) = s
            raise ParseChangesError, i
        (section, component) = extract_component_from_section(section)
        files[name] = Dict(size=size, section=section,
                           priority=priority, component=component)
        files[name][hashname] = md5
386 ################################################################################
def force_to_utf8(s):
    """Forces a string to UTF-8.  If the string isn't already UTF-8,
    it's assumed to be ISO-8859-1.  (The try-UTF-8-first branch is
    elided from this listing.)"""
    latin1_s = unicode(s,'iso8859-1')
    return latin1_s.encode('utf-8')
# Encode a mail header value, preferring plain ASCII, then UTF-8, then
# falling back to ISO-8859-1.
def rfc2047_encode(s):
    """Encodes a (header) string per RFC2047 if necessary.  If the
    string is neither ASCII nor UTF-8, it's assumed to be ISO-8859-1."""
    # codecs.lookup(...)[1] is the encoder; calling it probes whether the
    # string is representable in that charset (try/except ladder elided).
    codecs.lookup('ascii')[1](s)
    codecs.lookup('utf-8')[1](s)
    h = email.Header.Header(s, 'utf-8', 998)
    h = email.Header.Header(s, 'iso-8859-1', 998)
414 ################################################################################
# <Culus> 'The standard sucks, but my tool is supposed to interoperate
#          with it. I know - I'll fix the suckage and make things
def fix_maintainer (maintainer):
    """Parses a Maintainer or Changed-By field and returns:
    (1) an RFC822 compatible version,
    (2) an RFC2047 compatible version,
    (3) the name, (4) the email address.

    The name is forced to UTF-8 for both (1) and (3).  If the name field
    contains '.' or ',' (as allowed by Debian policy), (1) and (2) are
    switched to 'email (name)' format.  Raises ParseMaintError on
    malformed input."""
    maintainer = maintainer.strip()
        return ('', '', '', '')
    # Bare address (no '<'), or address-only '<foo@bar>' forms.
    if maintainer.find("<") == -1:
    elif (maintainer[0] == "<" and maintainer[-1:] == ">"):
        email = maintainer[1:-1]
        m = re_parse_maintainer.match(maintainer)
            raise ParseMaintError, "Doesn't parse as a valid Maintainer field."
    # Get an RFC2047 compliant version of the name
    rfc2047_name = rfc2047_encode(name)
    # Force the name to be UTF-8
    name = force_to_utf8(name)
    if name.find(',') != -1 or name.find('.') != -1:
        rfc822_maint = "%s (%s)" % (email, name)
        rfc2047_maint = "%s (%s)" % (email, rfc2047_name)
        rfc822_maint = "%s <%s>" % (name, email)
        rfc2047_maint = "%s <%s>" % (rfc2047_name, email)
    # buildd_* pseudo-addresses are exempt from the '@' sanity check.
    if email.find("@") == -1 and email.find("buildd_") != 0:
        raise ParseMaintError, "No @ found in email address part."
    return (rfc822_maint, rfc2047_maint, name, email)
465 ################################################################################
# sendmail wrapper, takes _either_ a message string or a file as arguments
def send_mail (message, filename=""):
    # If we've been passed a string dump it into a temporary file
        # NOTE(review): tempfile.mktemp() is race-prone, but the O_EXCL
        # open below guards the actual creation.
        filename = tempfile.mktemp()
        fd = os.open(filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0700)
        os.write (fd, message)
    # Invoke the configured sendmail command; raise on non-zero exit.
    (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
        raise SendmailFailedError, output
    # Clean up any temporary files
# Return the pool subdirectory for 'source' within 'component':
# lib* packages pool under their first four characters, everything else
# under the first character ('component' normalisation elided here).
def poolify (source, component):
    if source[:3] == "lib":
        return component + source[:4] + '/' + source + '/'
    return component + source[:1] + '/' + source + '/'
495 ################################################################################
# Move 'src' to 'dest' (file or directory), creating the destination
# directory if needed; refuses to clobber existing files unless
# 'overwrite' is set.  (The unlink of 'src' is elided from this listing.)
def move (src, dest, overwrite = 0, perms = 0664):
    if os.path.exists(dest) and os.path.isdir(dest):
    dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        # Create parents group-writable/setgid regardless of umask.
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Moving %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
            fubar("Can't move %s to %s - file already exists." % (src, dest))
            if not os.access(dest, os.W_OK):
                fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
# Copy 'src' to 'dest', mirroring move() above but raising
# FileExistsError / CantOverwriteError instead of aborting.
def copy (src, dest, overwrite = 0, perms = 0664):
    if os.path.exists(dest) and os.path.isdir(dest):
    dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        # Create parents group-writable/setgid regardless of umask.
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Copying %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
            raise FileExistsError
            if not os.access(dest, os.W_OK):
                raise CantOverwriteError
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
542 ################################################################################
    # Per-host database hostname lookup (the enclosing def line is
    # elided from this listing).
    res = socket.gethostbyaddr(socket.gethostname())
    database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
    if database_hostname:
        return database_hostname
# Return this host's dak config file, falling back to the default.
def which_conf_file ():
    res = socket.gethostbyaddr(socket.gethostname())
    if Cnf.get("Config::" + res[0] + "::DakConfig"):
        return Cnf["Config::" + res[0] + "::DakConfig"]
        return default_config
# Return this host's apt config file, falling back to the default.
def which_apt_conf_file ():
    res = socket.gethostbyaddr(socket.gethostname())
    if Cnf.get("Config::" + res[0] + "::AptConfig"):
        return Cnf["Config::" + res[0] + "::AptConfig"]
        return default_apt_config
# Return this host's forward-alias file, if one exists.
def which_alias_file():
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
    if os.path.exists(aliasfn):
574 ################################################################################
# Escape characters which have meaning to SQL's regex comparison operator ('~')
# (woefully incomplete)
    # (The enclosing def line is elided from this listing.)
    s = s.replace('+', '\\\\+')
    s = s.replace('.', '\\\\.')
584 ################################################################################
# Perform a substitution of template
# Read 'filename' and substitute each key of 'map' with its value.
def TemplateSubst(map, filename):
    file = open_file(filename)
    template = file.read()
        template = template.replace(x,map[x])
595 ################################################################################
# Print a fatal error and exit (sys.exit call elided from this listing).
def fubar(msg, exit_code=1):
    sys.stderr.write("E: %s\n" % (msg))
    # The following line belongs to warn(), whose def line is elided.
    sys.stderr.write("W: %s\n" % (msg))
################################################################################
# Returns the user name with a laughable attempt at rfc822 conformancy
# (read: removing stray periods).
    return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
################################################################################
    # Tail of a size/type formatter whose definition is elided here.
    return ("%d%s" % (c, t))
623 ################################################################################
# Normalise the "architecture" field of a parsed changes dict from a
# whitespace-separated string into a {arch: 1} dictionary.
def cc_fix_changes (changes):
    o = changes.get("architecture", "")
        del changes["architecture"]
    changes["architecture"] = {}
        changes["architecture"][j] = 1
# Sort by source name, source version, 'have source', and then by filename
def changes_compare (a, b):
        a_changes = parse_changes(a)
        b_changes = parse_changes(b)
    cc_fix_changes (a_changes)
    cc_fix_changes (b_changes)
    # Sort by source name
    a_source = a_changes.get("source")
    b_source = b_changes.get("source")
    q = cmp (a_source, b_source)
    # Sort by source version
    a_version = a_changes.get("version", "0")
    b_version = b_changes.get("version", "0")
    q = apt_pkg.VersionCompare(a_version, b_version)
    # Sort by 'have source'
    a_has_source = a_changes["architecture"].get("source")
    b_has_source = b_changes["architecture"].get("source")
    if a_has_source and not b_has_source:
    elif b_has_source and not a_has_source:
    # Fall back to sort by filename
673 ################################################################################
# Return 'dest' or the first free 'dest.N' variant; raises
# NoFreeFilenameError after 'too_many' attempts.
def find_next_free (dest, too_many=100):
    while os.path.exists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
    if extra >= too_many:
        raise NoFreeFilenameError
685 ################################################################################
# Join a result row with 'sep', mapping None entries to a placeholder
# (the None-replacement branch is elided from this listing).
def result_join (original, sep = '\t'):
    for i in xrange(len(original)):
        if original[i] == None:
            # NOTE(review): local name 'list' shadows the builtin.
            list.append(original[i])
    return sep.join(list)
696 ################################################################################
# Prefix every line of 'str' with 'prefix'; blank lines are dropped
# unless include_blank_lines is set.
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    for line in str.split('\n'):
        if line or include_blank_lines:
            out += "%s%s\n" % (prefix, line)
    # Strip trailing new line
def validate_changes_file_arg(filename, require_changes=1):
    """'filename' is either a .changes or .dak file.  If 'filename' is a
    .dak file, it's changed to be the corresponding .changes file.  The
    function then checks if the .changes file a) exists and b) is
    readable and returns the .changes filename if so.  If there's a
    problem, the next action depends on the option 'require_changes'

    o If 'require_changes' == -1, errors are ignored and the .changes
                                  filename is returned.
    o If 'require_changes' == 0, a warning is given and 'None' is returned.
    o If 'require_changes' == 1, a fatal error is raised.
    """
    orig_filename = filename
    if filename.endswith(".dak"):
        filename = filename[:-4]+".changes"
    if not filename.endswith(".changes"):
        error = "invalid file type; not a changes file"
    if not os.access(filename,os.R_OK):
        if os.path.exists(filename):
            error = "permission denied"
            error = "file not found"
        if require_changes == 1:
            fubar("%s: %s." % (orig_filename, error))
        elif require_changes == 0:
            warn("Skipping %s - %s" % (orig_filename, error))
    else: # We only care about the .dak file
################################################################################
    # Body of real_arch(): true for any concrete (binary) architecture
    # (its def line is elided from this listing).
    return (arch != "source" and arch != "all")
755 ################################################################################
def join_with_commas_and(list):
    """Render a list as an English enumeration, e.g. ['a','b','c'] ->
    'a, b and c'; an empty list becomes the word 'nothing'."""
    count = len(list)
    if count == 0:
        return "nothing"
    if count == 1:
        return list[0]
    head = ", ".join(list[:-1])
    return head + " and " + list[-1]
762 ################################################################################
        # Body of pp_deps(): pretty-print a dependency atom list as
        # "pkg (op ver) | ..." (its def line is elided from this listing).
        (pkg, version, constraint) = atom
            pp_dep = "%s (%s %s)" % (pkg, constraint, version)
        pp_deps.append(pp_dep)
    return " |".join(pp_deps)
775 ################################################################################
780 ################################################################################
# Handle -a, -c and -s arguments; returns them as SQL constraints
def parse_args(Options):
    # Process suite
    for suite in split_args(Options["Suite"]):
        suite_id = database.get_suite_id(suite)
            warn("suite '%s' not recognised." % (suite))
            suite_ids_list.append(suite_id)
        con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
        fubar("No valid suite given.")
    # Process component
    if Options["Component"]:
        component_ids_list = []
        for component in split_args(Options["Component"]):
            component_id = database.get_component_id(component)
            if component_id == -1:
                warn("component '%s' not recognised." % (component))
                component_ids_list.append(component_id)
        if component_ids_list:
            con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
            fubar("No valid component given.")
    # Process architecture
    con_architectures = ""
    if Options["Architecture"]:
        for architecture in split_args(Options["Architecture"]):
            # "source" is handled via check_source, not an arch id.
            if architecture == "source":
                architecture_id = database.get_architecture_id(architecture)
                if architecture_id == -1:
                    warn("architecture '%s' not recognised." % (architecture))
                    arch_ids_list.append(architecture_id)
            con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
            fubar("No valid architecture given.")
    return (con_suites, con_architectures, con_components, check_source)
840 ################################################################################
# Inspired(tm) by Bryn Keller's print_exc_plus (See
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
    # Body of an extended-traceback printer; its def line and frame-walk
    # setup are elided from this listing.
    tb = sys.exc_info()[2]
    traceback.print_exc()
        print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
                                               frame.f_code.co_filename,
        for key, value in frame.f_locals.items():
            print "\t%20s = " % key,
                print "<unable to print>"
################################################################################
# Run 'function', dumping the extended traceback above on failure
# (body elided from this listing).
def try_with_debug(function):
# Function for use in sorting lists of architectures.
# Sorts normally except that 'source' dominates all others.
def arch_compare_sw (a, b):
    if a == "source" and b == "source":
892 ################################################################################
# Split command line arguments which can be separated by either commas
# or whitespace.  If dwim is set, it will complain about string ending
# in comma since this usually means someone did 'dak ls -a i386, m68k
# foo' or something and the inevitable confusion resulting from 'm68k'
# being treated as an argument is undesirable.
def split_args (s, dwim=1):
    if s.find(",") == -1:
        if s[-1:] == "," and dwim:
            fubar("split_args: found trailing comma, spurious space maybe?")
908 ################################################################################
def Dict(**kwargs):
    """Shorthand dictionary constructor: Dict(a=1) == {'a': 1}."""
    return kwargs
912 ########################################
# Our very own version of commands.getstatusoutput(), hacked to support
# an extra status file descriptor for gpgv.  Returns (output, status,
# exit_status).  Fork/read-loop plumbing is partly elided from this
# listing.
def gpgv_get_status_output(cmd, status_read, status_write):
    cmd = ['/bin/sh', '-c', cmd]
    p2cread, p2cwrite = os.pipe()
    c2pread, c2pwrite = os.pipe()
    errout, errin = os.pipe()
        # Child: close every fd except the status pipe, then exec.
        for i in range(3, 256):
            if i != status_write:
        os.execvp(cmd[0], cmd)
    os.dup2(c2pread, c2pwrite)
    os.dup2(errout, errin)
        i, o, e = select.select([c2pwrite, errin, status_read], [], [])
            r = os.read(fd, 8196)
                if fd == c2pwrite or fd == errin:
                elif fd == status_read:
                    fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
    pid, exit_status = os.waitpid(pid, 0)
    os.close(status_write)
    os.close(status_read)
    return output, status, exit_status
976 ################################################################################
# Parse gpgv --status-fd output into a {keyword: args} dict; returns
# (keywords, internal_error) where internal_error collects malformed
# lines as text.
def process_gpgv_output(status):
    # Process the status-fd output
    for line in status.split('\n'):
            internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
        # Some tokens may legitimately repeat; everything else must not.
        if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
            keywords[keyword] = args
    return (keywords, internal_error)
1003 ################################################################################
def retrieve_key (filename, keyserver=None, keyring=None):
    """Retrieve the key that signed 'filename' from 'keyserver' and
    add it to 'keyring'.  Returns nothing on success, or an error message
    string on failure."""
    # Defaults for keyserver and keyring
        keyserver = Cnf["Dinstall::KeyServer"]
        keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(filename):
        return "%s: tainted filename" % (filename)
    # Invoke gpgv on the file
    status_read, status_write = os.pipe();
    cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
    (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)
        return internal_error
    # NO_PUBKEY is expected here: /dev/null keyring means the signing key
    # can never be found, and its argument is the fingerprint we want.
    if not keywords.has_key("NO_PUBKEY"):
        return "didn't find expected NO_PUBKEY in gpgv status-fd output"
    fingerprint = keywords["NO_PUBKEY"][0]
    # XXX - gpg sucks.  You can't use --secret-keyring=/dev/null as
    # it'll try to create a lockfile in /dev.  A better solution might
    # be a tempfile or something.
    cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
          % (Cnf["Dinstall::SigningKeyring"])
    cmd += " --keyring %s --keyserver %s --recv-key %s" \
           % (keyring, keyserver, fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
        return "'%s' failed with exit code %s" % (cmd, result)
1047 ################################################################################
# Build the "--keyring X --keyring Y ..." argument string for gpg(v),
# defaulting to the configured Dinstall::GPGKeyring list.
def gpg_keyring_args(keyrings=None):
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
    return " ".join(["--keyring %s" % x for x in keyrings])
1055 ################################################################################
def check_signature (sig_filename, reject, data_filename="", keyrings=None, autofetch=None):
    """Check the signature of a file and return the fingerprint if the
    signature is valid or 'None' if it's not.  The first argument is the
    filename whose signature should be checked.  The second argument is a
    reject function and is called when an error is found.  The reject()
    function must allow for two arguments: the first is the error message,
    the second is an optional prefix string.  It's possible for reject()
    to be called more than once during an invocation of check_signature().
    The third argument is optional and is the name of the files the
    detached signature applies to.  The fourth argument is optional and is
    a *list* of keyrings to use.  'autofetch' can either be None, True or
    False.  If None, the default behaviour specified in the config will be
    used."""
    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(sig_filename):
        reject("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
    if data_filename and not re_taint_free.match(data_filename):
        reject("!!WARNING!! tainted data filename: '%s'." % (data_filename))
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
    # Autofetch the signing key if that's enabled
    if autofetch == None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
        error_msg = retrieve_key(sig_filename)
    # Build the command line
    status_read, status_write = os.pipe();
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)
    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)
    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)
    # If we failed to parse the status-fd output, let's just whine and bail now
        reject("internal error while performing signature check on %s." % (sig_filename))
        reject(internal_error, "")
        reject("Please report the above errors to the Archive maintainers by replying to this mail.", "")
    # Now check for obviously bad things in the processed output
    if keywords.has_key("KEYREVOKED"):
        reject("The key used to sign %s has been revoked." % (sig_filename))
    if keywords.has_key("BADSIG"):
        reject("bad signature on %s." % (sig_filename))
    if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
        reject("failed to check signature on %s." % (sig_filename))
    if keywords.has_key("NO_PUBKEY"):
        args = keywords["NO_PUBKEY"]
        reject("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if keywords.has_key("BADARMOR"):
        reject("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if keywords.has_key("NODATA"):
        reject("no signature found in %s." % (sig_filename))
    if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
        args = keywords["KEYEXPIRED"]
        reject("The key (0x%s) used to sign %s has expired." % (key, sig_filename))
    # Next check gpgv exited with a zero return code
        reject("gpgv failed while checking %s." % (sig_filename))
        reject(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
        reject(prefix_multi_line_string(output, " [GPG output:] "), "")
    # Sanity check the good stuff we expect
    if not keywords.has_key("VALIDSIG"):
        reject("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
        args = keywords["VALIDSIG"]
        reject("internal error while checking signature on %s." % (sig_filename))
        # First VALIDSIG argument is the key fingerprint.
        fingerprint = args[0]
    if not keywords.has_key("GOODSIG"):
        reject("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if not keywords.has_key("SIG_ID"):
        reject("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))
    # Finally ensure there's not something we don't recognise
    known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
                          NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")
    for keyword in keywords.keys():
        if not known_keywords.has_key(keyword):
            reject("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
1185 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """Retrieve email addresses from gpg key uids for a given fingerprint,
    caching results in key_uid_email_cache."""
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses != None:
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
          % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    for l in output.split('\n'):
        m = re_gpg_uid.match(l)
            addresses.add(m.group(1))
    key_uid_email_cache[fingerprint] = addresses
1204 ################################################################################
# Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
# Word-wrap 'paragraph' at 'max_length' columns, indenting continuation
# lines with 'prefix'.  Over-long single words go on their own line.
def wrap(paragraph, max_length, prefix=""):
    words = paragraph.split()
        word_size = len(word)
        if word_size > max_length:
                s += line + '\n' + prefix
            s += word + '\n' + prefix
            new_length = len(line) + word_size + 1
            if new_length > max_length:
                s += line + '\n' + prefix
1237 ################################################################################
1239 # Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
1240 # Returns fixed 'src'
def clean_symlink (src, dest, root):
    """Relativize the absolute symlink 'src' -> 'dest' with respect to
    'root'; returns the fixed (relative) 'src'."""
    src = src.replace(root, '', 1)
    target_dir = os.path.dirname(dest.replace(root, '', 1))
    # One '../' per directory component remaining in the link's location.
    hops = '../' * len(target_dir.split('/'))
    return hops + src
1248 ################################################################################
def temp_filename(directory=None, dotprefix=None, perms=0700):
    """Return a secure and unique filename by pre-creating it.
    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'dotprefix' is non-null, the filename will be prefixed with a '.'."""
        # Temporarily redirect tempfile's directory, restoring it below.
        old_tempdir = tempfile.tempdir
        tempfile.tempdir = directory
    filename = tempfile.mktemp()
        filename = "%s/.%s" % (os.path.dirname(filename), os.path.basename(filename))
    # O_EXCL pre-creation closes the mktemp() race window.
    fd = os.open(filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, perms)
        tempfile.tempdir = old_tempdir
1271 ################################################################################
# checks if the user part of the email is listed in the alias file
def is_email_alias(email):
    # Populate the module-level alias cache on first use.
    if alias_cache == None:
        aliasfn = which_alias_file()
        # Alias file format is "name: target", one per line.
        for l in open(aliasfn):
            alias_cache.add(l.split(':')[0])
    uid = email.split('@')[0]
    return uid in alias_cache
1286 ################################################################################
# Module-level side effect: load the dak configuration (and any
# host-specific override) into the global Cnf at import time.
Cnf = apt_pkg.newConfiguration()
apt_pkg.ReadConfigFileISC(Cnf,default_config)
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1296 ################################################################################