2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
41 import email as modemail
43 from dbconn import DBConn, get_architecture, get_component, get_suite
44 from dak_exceptions import *
45 from textutils import fix_maintainer
46 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
47 re_multi_line_field, re_srchasver, re_verwithext, \
48 re_parse_maintainer, re_taint_free, re_gpg_uid, re_re_mark, \
51 ################################################################################
53 default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
54 default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used
56 alias_cache = None #: Cache for email alias checks
57 key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
59 # (hashname, function, earliest_changes_version)
60 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
61 ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
63 ################################################################################
66 """ Escape html chars """
67 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
69 ################################################################################
71 def open_file(filename, mode='r'):
73 Open C{file}, return fileobject.
75 @type filename: string
76 @param filename: path/filename to open
79 @param mode: open mode
82 @return: open fileobject
84 @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
88 f = open(filename, mode)
90 raise CantOpenError, filename
93 ################################################################################
95 def our_raw_input(prompt=""):
97 sys.stdout.write(prompt)
103 sys.stderr.write("\nUser interrupt (^D).\n")
106 ################################################################################
108 def extract_component_from_section(section):
111 if section.find('/') != -1:
112 component = section.split('/')[0]
114 # Expand default component
116 if Cnf.has_key("Component::%s" % section):
121 return (section, component)
123 ################################################################################
125 def parse_deb822(contents, signing_rules=0):
129 # Split the lines in the input, keeping the linebreaks.
130 lines = contents.splitlines(True)
133 raise ParseChangesError, "[Empty changes file]"
135 # Reindex by line number so we can easily verify the format of
141 indexed_lines[index] = line[:-1]
145 num_of_lines = len(indexed_lines.keys())
148 while index < num_of_lines:
150 line = indexed_lines[index]
152 if signing_rules == 1:
154 if index > num_of_lines:
155 raise InvalidDscError, index
156 line = indexed_lines[index]
157 if not line.startswith("-----BEGIN PGP SIGNATURE"):
158 raise InvalidDscError, index
163 if line.startswith("-----BEGIN PGP SIGNATURE"):
165 if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
167 if signing_rules == 1:
168 while index < num_of_lines and line != "":
170 line = indexed_lines[index]
172 # If we're not inside the signed data, don't process anything
173 if signing_rules >= 0 and not inside_signature:
175 slf = re_single_line_field.match(line)
177 field = slf.groups()[0].lower()
178 changes[field] = slf.groups()[1]
182 changes[field] += '\n'
184 mlf = re_multi_line_field.match(line)
187 raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
188 if first == 1 and changes[field] != "":
189 changes[field] += '\n'
191 changes[field] += mlf.groups()[0] + '\n'
195 if signing_rules == 1 and inside_signature:
196 raise InvalidDscError, index
198 changes["filecontents"] = "".join(lines)
200 if changes.has_key("source"):
201 # Strip the source version in brackets from the source field,
202 # put it in the "source-version" field instead.
203 srcver = re_srchasver.search(changes["source"])
205 changes["source"] = srcver.group(1)
206 changes["source-version"] = srcver.group(2)
209 raise ParseChangesError, error
213 ################################################################################
215 def parse_changes(filename, signing_rules=0):
217 Parses a changes file and returns a dictionary where each field is a
218 key. The mandatory first argument is the filename of the .changes
221 signing_rules is an optional argument:
223 - If signing_rules == -1, no signature is required.
224 - If signing_rules == 0 (the default), a signature is required.
225 - If signing_rules == 1, it turns on the same strict format checking
228 The rules for (signing_rules == 1)-mode are:
230 - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
231 followed by any PGP header data and must end with a blank line.
233 - The data section must end with a blank line and must be followed by
234 "-----BEGIN PGP SIGNATURE-----".
237 changes_in = open_file(filename)
238 content = changes_in.read()
241 unicode(content, 'utf-8')
243 raise ChangesUnicodeError, "Changes file not proper utf-8"
244 return parse_deb822(content, signing_rules)
246 ################################################################################
def hash_key(hashname):
    """Return the files-dict key under which the named hash is stored (e.g. "md5" -> "md5sum")."""
    return hashname + 'sum'
251 ################################################################################
253 def create_hash(where, files, hashname, hashfunc):
255 create_hash extends the passed files dict with the given hash by
256 iterating over all files on disk and passing them to the hashing
261 for f in files.keys():
263 file_handle = open_file(f)
264 except CantOpenError:
265 rejmsg.append("Could not open file %s for checksumming" % (f))
268 files[f][hash_key(hashname)] = hashfunc(file_handle)
273 ################################################################################
275 def check_hash(where, files, hashname, hashfunc):
277 check_hash checks the given hash in the files dict against the actual
278 files on disk. The hash values need to be present consistently in
279 all file entries. It does not modify its input in any way.
283 for f in files.keys():
287 file_handle = open_file(f)
289 # Check for the hash entry, to not trigger a KeyError.
290 if not files[f].has_key(hash_key(hashname)):
291 rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
295 # Actually check the hash for correctness.
296 if hashfunc(file_handle) != files[f][hash_key(hashname)]:
297 rejmsg.append("%s: %s check failed in %s" % (f, hashname,
299 except CantOpenError:
300 # TODO: This happens when the file is in the pool.
301 # warn("Cannot open file %s" % f)
308 ################################################################################
310 def check_size(where, files):
312 check_size checks the file sizes in the passed files dict against the
317 for f in files.keys():
322 # TODO: This happens when the file is in the pool.
326 actual_size = entry[stat.ST_SIZE]
327 size = int(files[f]["size"])
328 if size != actual_size:
329 rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
330 % (f, actual_size, size, where))
333 ################################################################################
335 def check_hash_fields(what, manifest):
337 check_hash_fields ensures that there are no checksum fields in the
338 given dict that we do not know about.
342 hashes = map(lambda x: x[0], known_hashes)
343 for field in manifest:
344 if field.startswith("checksums-"):
345 hashname = field.split("-",1)[1]
346 if hashname not in hashes:
347 rejmsg.append("Unsupported checksum field for %s "\
348 "in %s" % (hashname, what))
351 ################################################################################
353 def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
354 if format >= version:
355 # The version should contain the specified hash.
358 # Import hashes from the changes
359 rejmsg = parse_checksums(".changes", files, changes, hashname)
363 # We need to calculate the hash because it can't possibly
366 return func(".changes", files, hashname, hashfunc)
368 # We could add the orig which might be in the pool to the files dict to
369 # access the checksums easily.
371 def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
373 ensure_dsc_hashes' task is to ensure that each and every *present* hash
374 in the dsc is correct, i.e. identical to the changes file and if necessary
375 the pool. The latter task is delegated to check_hash.
379 if not dsc.has_key('Checksums-%s' % (hashname,)):
381 # Import hashes from the dsc
382 parse_checksums(".dsc", dsc_files, dsc, hashname)
384 rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
387 ################################################################################
389 def parse_checksums(where, files, manifest, hashname):
391 field = 'checksums-%s' % hashname
392 if not field in manifest:
394 for line in manifest[field].split('\n'):
397 checksum, size, checkfile = line.strip().split(' ')
398 if not files.has_key(checkfile):
399 # TODO: check for the file's entry in the original files dict, not
400 # the one modified by (auto)byhand and other weird stuff
401 # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
402 # (file, hashname, where))
404 if not files[checkfile]["size"] == size:
405 rejmsg.append("%s: size differs for files and checksums-%s entry "\
406 "in %s" % (checkfile, hashname, where))
408 files[checkfile][hash_key(hashname)] = checksum
409 for f in files.keys():
410 if not files[f].has_key(hash_key(hashname)):
411 rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
415 ################################################################################
417 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
419 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
422 # Make sure we have a Files: field to parse...
423 if not changes.has_key(field):
424 raise NoFilesFieldError
426 # Make sure we recognise the format of the Files: field
427 format = re_verwithext.search(changes.get("format", "0.0"))
429 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
431 format = format.groups()
432 if format[1] == None:
433 format = int(float(format[0])), 0, format[2]
435 format = int(format[0]), int(format[1]), format[2]
436 if format[2] == None:
440 # format = (1,0) are the only formats we currently accept,
441 # format = (0,0) are missing format headers of which we still
442 # have some in the archive.
443 if format != (1,0) and format != (0,0):
444 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
446 if (format < (1,5) or format > (1,8)):
447 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
448 if field != "files" and format < (1,8):
449 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
451 includes_section = (not is_a_dsc) and field == "files"
453 # Parse each entry/line:
454 for i in changes[field].split('\n'):
458 section = priority = ""
461 (md5, size, section, priority, name) = s
463 (md5, size, name) = s
465 raise ParseChangesError, i
472 (section, component) = extract_component_from_section(section)
474 files[name] = Dict(size=size, section=section,
475 priority=priority, component=component)
476 files[name][hashname] = md5
480 ################################################################################
482 def send_mail (message, filename=""):
483 """sendmail wrapper, takes _either_ a message string or a file as arguments"""
485 # If we've been passed a string dump it into a temporary file
487 (fd, filename) = tempfile.mkstemp()
488 os.write (fd, message)
491 if Cnf.has_key("Dinstall::MailWhiteList") and \
492 Cnf["Dinstall::MailWhiteList"] != "":
493 message_in = open_file(filename)
494 message_raw = modemail.message_from_file(message_in)
498 whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
500 for line in whitelist_in:
501 if not re_whitespace_comment.match(line):
502 if re_re_mark.match(line):
503 whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
505 whitelist.append(re.compile(re.escape(line.strip())))
510 fields = ["To", "Bcc", "Cc"]
513 value = message_raw.get(field, None)
516 for item in value.split(","):
517 (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
523 if not mail_whitelisted:
524 print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])
528 # Doesn't have any mail in whitelist so remove the header
530 del message_raw[field]
532 message_raw.replace_header(field, string.join(match, ", "))
534 # Change message fields in order if we don't have a To header
535 if not message_raw.has_key("To"):
538 if message_raw.has_key(field):
539 message_raw[fields[-1]] = message_raw[field]
540 del message_raw[field]
543 # Clean up any temporary files
544 # and return, as we removed all recipients.
546 os.unlink (filename);
549 fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
550 os.write (fd, message_raw.as_string(True));
554 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
556 raise SendmailFailedError, output
558 # Clean up any temporary files
562 ################################################################################
564 def poolify (source, component):
567 if source[:3] == "lib":
568 return component + source[:4] + '/' + source + '/'
570 return component + source[:1] + '/' + source + '/'
572 ################################################################################
574 def move (src, dest, overwrite = 0, perms = 0664):
575 if os.path.exists(dest) and os.path.isdir(dest):
578 dest_dir = os.path.dirname(dest)
579 if not os.path.exists(dest_dir):
580 umask = os.umask(00000)
581 os.makedirs(dest_dir, 02775)
583 #print "Moving %s to %s..." % (src, dest)
584 if os.path.exists(dest) and os.path.isdir(dest):
585 dest += '/' + os.path.basename(src)
586 # Don't overwrite unless forced to
587 if os.path.exists(dest):
589 fubar("Can't move %s to %s - file already exists." % (src, dest))
591 if not os.access(dest, os.W_OK):
592 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
593 shutil.copy2(src, dest)
594 os.chmod(dest, perms)
597 def copy (src, dest, overwrite = 0, perms = 0664):
598 if os.path.exists(dest) and os.path.isdir(dest):
601 dest_dir = os.path.dirname(dest)
602 if not os.path.exists(dest_dir):
603 umask = os.umask(00000)
604 os.makedirs(dest_dir, 02775)
606 #print "Copying %s to %s..." % (src, dest)
607 if os.path.exists(dest) and os.path.isdir(dest):
608 dest += '/' + os.path.basename(src)
609 # Don't overwrite unless forced to
610 if os.path.exists(dest):
612 raise FileExistsError
614 if not os.access(dest, os.W_OK):
615 raise CantOverwriteError
616 shutil.copy2(src, dest)
617 os.chmod(dest, perms)
619 ################################################################################
622 res = socket.gethostbyaddr(socket.gethostname())
623 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
624 if database_hostname:
625 return database_hostname
629 def which_conf_file ():
630 res = socket.gethostbyaddr(socket.gethostname())
631 # In case we allow local config files per user, try if one exists
632 if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
633 homedir = os.getenv("HOME")
634 confpath = os.path.join(homedir, "/etc/dak.conf")
635 if os.path.exists(confpath):
636 apt_pkg.ReadConfigFileISC(Cnf,default_config)
638 # We are still in here, so there is no local config file or we do
639 # not allow local files. Do the normal stuff.
640 if Cnf.get("Config::" + res[0] + "::DakConfig"):
641 return Cnf["Config::" + res[0] + "::DakConfig"]
643 return default_config
645 def which_apt_conf_file ():
646 res = socket.gethostbyaddr(socket.gethostname())
647 # In case we allow local config files per user, try if one exists
648 if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
649 homedir = os.getenv("HOME")
650 confpath = os.path.join(homedir, "/etc/dak.conf")
651 if os.path.exists(confpath):
652 apt_pkg.ReadConfigFileISC(Cnf,default_config)
654 if Cnf.get("Config::" + res[0] + "::AptConfig"):
655 return Cnf["Config::" + res[0] + "::AptConfig"]
657 return default_apt_config
659 def which_alias_file():
660 hostname = socket.gethostbyaddr(socket.gethostname())[0]
661 aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
662 if os.path.exists(aliasfn):
667 ################################################################################
669 def TemplateSubst(map, filename):
670 """ Perform a substition of template """
671 templatefile = open_file(filename)
672 template = templatefile.read()
674 template = template.replace(x, str(map[x]))
678 ################################################################################
680 def fubar(msg, exit_code=1):
681 sys.stderr.write("E: %s\n" % (msg))
685 sys.stderr.write("W: %s\n" % (msg))
687 ################################################################################
689 # Returns the user name with a laughable attempt at rfc822 conformancy
690 # (read: removing stray periods).
692 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
694 ################################################################################
704 return ("%d%s" % (c, t))
706 ################################################################################
708 def cc_fix_changes (changes):
709 o = changes.get("architecture", "")
711 del changes["architecture"]
712 changes["architecture"] = {}
714 changes["architecture"][j] = 1
716 def changes_compare (a, b):
717 """ Sort by source name, source version, 'have source', and then by filename """
719 a_changes = parse_changes(a)
724 b_changes = parse_changes(b)
728 cc_fix_changes (a_changes)
729 cc_fix_changes (b_changes)
731 # Sort by source name
732 a_source = a_changes.get("source")
733 b_source = b_changes.get("source")
734 q = cmp (a_source, b_source)
738 # Sort by source version
739 a_version = a_changes.get("version", "0")
740 b_version = b_changes.get("version", "0")
741 q = apt_pkg.VersionCompare(a_version, b_version)
745 # Sort by 'have source'
746 a_has_source = a_changes["architecture"].get("source")
747 b_has_source = b_changes["architecture"].get("source")
748 if a_has_source and not b_has_source:
750 elif b_has_source and not a_has_source:
753 # Fall back to sort by filename
756 ################################################################################
758 def find_next_free (dest, too_many=100):
761 while os.path.exists(dest) and extra < too_many:
762 dest = orig_dest + '.' + repr(extra)
764 if extra >= too_many:
765 raise NoFreeFilenameError
768 ################################################################################
770 def result_join (original, sep = '\t'):
772 for i in xrange(len(original)):
773 if original[i] == None:
774 resultlist.append("")
776 resultlist.append(original[i])
777 return sep.join(resultlist)
779 ################################################################################
781 def prefix_multi_line_string(str, prefix, include_blank_lines=0):
783 for line in str.split('\n'):
785 if line or include_blank_lines:
786 out += "%s%s\n" % (prefix, line)
787 # Strip trailing new line
792 ################################################################################
794 def validate_changes_file_arg(filename, require_changes=1):
796 'filename' is either a .changes or .dak file. If 'filename' is a
797 .dak file, it's changed to be the corresponding .changes file. The
798 function then checks if the .changes file a) exists and b) is
799 readable and returns the .changes filename if so. If there's a
800 problem, the next action depends on the option 'require_changes'
803 - If 'require_changes' == -1, errors are ignored and the .changes
804 filename is returned.
805 - If 'require_changes' == 0, a warning is given and 'None' is returned.
806 - If 'require_changes' == 1, a fatal error is raised.
811 orig_filename = filename
812 if filename.endswith(".dak"):
813 filename = filename[:-4]+".changes"
815 if not filename.endswith(".changes"):
816 error = "invalid file type; not a changes file"
818 if not os.access(filename,os.R_OK):
819 if os.path.exists(filename):
820 error = "permission denied"
822 error = "file not found"
825 if require_changes == 1:
826 fubar("%s: %s." % (orig_filename, error))
827 elif require_changes == 0:
828 warn("Skipping %s - %s" % (orig_filename, error))
830 else: # We only care about the .dak file
835 ################################################################################
838 return (arch != "source" and arch != "all")
840 ################################################################################
def join_with_commas_and(list):
    """Join items into an English-style list: "a, b and c".

    Returns "nothing" for an empty list and the sole item for a
    single-element list.
    """
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    head = ", ".join(list[:-1])
    return "%s and %s" % (head, list[-1])
847 ################################################################################
852 (pkg, version, constraint) = atom
854 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
857 pp_deps.append(pp_dep)
858 return " |".join(pp_deps)
860 ################################################################################
865 ################################################################################
867 def parse_args(Options):
868 """ Handle -a, -c and -s arguments; returns them as SQL constraints """
869 # XXX: This should go away and everything which calls it be converted
870 # to use SQLA properly. For now, we'll just fix it not to use
871 # the old Pg interface though
872 session = DBConn().session()
876 for suitename in split_args(Options["Suite"]):
877 suite = get_suite(suitename, session=session)
878 if suite.suite_id is None:
879 warn("suite '%s' not recognised." % (suite.suite_name))
881 suite_ids_list.append(suite.suite_id)
883 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
885 fubar("No valid suite given.")
890 if Options["Component"]:
891 component_ids_list = []
892 for componentname in split_args(Options["Component"]):
893 component = get_component(componentname, session=session)
894 if component is None:
895 warn("component '%s' not recognised." % (componentname))
897 component_ids_list.append(component.component_id)
898 if component_ids_list:
899 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
901 fubar("No valid component given.")
905 # Process architecture
906 con_architectures = ""
908 if Options["Architecture"]:
910 for archname in split_args(Options["Architecture"]):
911 if archname == "source":
914 arch = get_architecture(archname, session=session)
916 warn("architecture '%s' not recognised." % (archname))
918 arch_ids_list.append(arch.arch_id)
920 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
923 fubar("No valid architecture given.")
927 return (con_suites, con_architectures, con_components, check_source)
929 ################################################################################
931 # Inspired(tm) by Bryn Keller's print_exc_plus (See
932 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
935 tb = sys.exc_info()[2]
944 traceback.print_exc()
946 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
947 frame.f_code.co_filename,
949 for key, value in frame.f_locals.items():
950 print "\t%20s = " % key,
954 print "<unable to print>"
956 ################################################################################
958 def try_with_debug(function):
966 ################################################################################
968 def arch_compare_sw (a, b):
970 Function for use in sorting lists of architectures.
972 Sorts normally except that 'source' dominates all others.
975 if a == "source" and b == "source":
984 ################################################################################
986 def split_args (s, dwim=1):
988 Split command line arguments which can be separated by either commas
989 or whitespace. If dwim is set, it will complain about string ending
990 in comma since this usually means someone did 'dak ls -a i386, m68k
991 foo' or something and the inevitable confusion resulting from 'm68k'
992 being treated as an argument is undesirable.
995 if s.find(",") == -1:
998 if s[-1:] == "," and dwim:
999 fubar("split_args: found trailing comma, spurious space maybe?")
1002 ################################################################################
def Dict(**kwargs):
    """Build a dictionary from keyword arguments.

    Legacy shorthand equivalent to dict(**kwargs); the ``**`` parameter
    was renamed from ``dict`` so it no longer shadows the builtin
    (the name of a ``**`` parameter is purely local, so callers are
    unaffected).
    """
    return kwargs
1006 ########################################
1008 def gpgv_get_status_output(cmd, status_read, status_write):
1010 Our very own version of commands.getouputstatus(), hacked to support
1014 cmd = ['/bin/sh', '-c', cmd]
1015 p2cread, p2cwrite = os.pipe()
1016 c2pread, c2pwrite = os.pipe()
1017 errout, errin = os.pipe()
1027 for i in range(3, 256):
1028 if i != status_write:
1034 os.execvp(cmd[0], cmd)
1040 os.dup2(c2pread, c2pwrite)
1041 os.dup2(errout, errin)
1043 output = status = ""
1045 i, o, e = select.select([c2pwrite, errin, status_read], [], [])
1048 r = os.read(fd, 8196)
1050 more_data.append(fd)
1051 if fd == c2pwrite or fd == errin:
1053 elif fd == status_read:
1056 fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
1058 pid, exit_status = os.waitpid(pid, 0)
1060 os.close(status_write)
1061 os.close(status_read)
1071 return output, status, exit_status
1073 ################################################################################
1075 def process_gpgv_output(status):
1076 # Process the status-fd output
1079 for line in status.split('\n'):
1083 split = line.split()
1085 internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
1087 (gnupg, keyword) = split[:2]
1088 if gnupg != "[GNUPG:]":
1089 internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
1092 if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
1093 internal_error += "found duplicate status token ('%s').\n" % (keyword)
1096 keywords[keyword] = args
1098 return (keywords, internal_error)
1100 ################################################################################
1102 def retrieve_key (filename, keyserver=None, keyring=None):
1104 Retrieve the key that signed 'filename' from 'keyserver' and
1105 add it to 'keyring'. Returns nothing on success, or an error message
1109 # Defaults for keyserver and keyring
1111 keyserver = Cnf["Dinstall::KeyServer"]
1113 keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
1115 # Ensure the filename contains no shell meta-characters or other badness
1116 if not re_taint_free.match(filename):
1117 return "%s: tainted filename" % (filename)
1119 # Invoke gpgv on the file
1120 status_read, status_write = os.pipe()
1121 cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1122 (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1124 # Process the status-fd output
1125 (keywords, internal_error) = process_gpgv_output(status)
1127 return internal_error
1129 if not keywords.has_key("NO_PUBKEY"):
1130 return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1132 fingerprint = keywords["NO_PUBKEY"][0]
1133 # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
1134 # it'll try to create a lockfile in /dev. A better solution might
1135 # be a tempfile or something.
1136 cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1137 % (Cnf["Dinstall::SigningKeyring"])
1138 cmd += " --keyring %s --keyserver %s --recv-key %s" \
1139 % (keyring, keyserver, fingerprint)
1140 (result, output) = commands.getstatusoutput(cmd)
1142 return "'%s' failed with exit code %s" % (cmd, result)
1146 ################################################################################
1148 def gpg_keyring_args(keyrings=None):
1150 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1152 return " ".join(["--keyring %s" % x for x in keyrings])
1154 ################################################################################
1156 def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None):
1158 Check the signature of a file and return the fingerprint if the
1159 signature is valid or 'None' if it's not. The first argument is the
1160 filename whose signature should be checked. The second argument is a
1161 reject function and is called when an error is found. The reject()
1162 function must allow for two arguments: the first is the error message,
1163 the second is an optional prefix string. It's possible for reject()
1164 to be called more than once during an invocation of check_signature().
1165 The third argument is optional and is the name of the files the
1166 detached signature applies to. The fourth argument is optional and is
1167 a *list* of keyrings to use. 'autofetch' can either be None, True or
1168 False. If None, the default behaviour specified in the config will be
1174 # Ensure the filename contains no shell meta-characters or other badness
1175 if not re_taint_free.match(sig_filename):
1176 rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
1177 return (None, rejects)
1179 if data_filename and not re_taint_free.match(data_filename):
1180 rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
1181 return (None, rejects)
1184 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1186 # Autofetch the signing key if that's enabled
1187 if autofetch == None:
1188 autofetch = Cnf.get("Dinstall::KeyAutoFetch")
1190 error_msg = retrieve_key(sig_filename)
1192 rejects.append(error_msg)
1193 return (None, rejects)
1195 # Build the command line
1196 status_read, status_write = os.pipe()
1197 cmd = "gpgv --status-fd %s %s %s %s" % (
1198 status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)
1200 # Invoke gpgv on the file
1201 (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)
1203 # Process the status-fd output
1204 (keywords, internal_error) = process_gpgv_output(status)
1206 # If we failed to parse the status-fd output, let's just whine and bail now
1208 rejects.append("internal error while performing signature check on %s." % (sig_filename))
1209 rejects.append(internal_error, "")
1210 rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.", "")
1211 return (None, rejects)
1213 # Now check for obviously bad things in the processed output
1214 if keywords.has_key("KEYREVOKED"):
1215 rejects.append("The key used to sign %s has been revoked." % (sig_filename))
1216 if keywords.has_key("BADSIG"):
1217 rejects.append("bad signature on %s." % (sig_filename))
1218 if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
1219 rejects.append("failed to check signature on %s." % (sig_filename))
1220 if keywords.has_key("NO_PUBKEY"):
1221 args = keywords["NO_PUBKEY"]
1224 rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
1225 if keywords.has_key("BADARMOR"):
1226 rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
1227 if keywords.has_key("NODATA"):
1228 rejects.append("no signature found in %s." % (sig_filename))
1229 if keywords.has_key("EXPKEYSIG"):
1230 args = keywords["EXPKEYSIG"]
1233 rejects.append("Signature made by expired key 0x%s" % (key))
1234 if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
1235 args = keywords["KEYEXPIRED"]
1239 if timestamp.count("T") == 0:
1241 expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
1243 expiredate = "unknown (%s)" % (timestamp)
1245 expiredate = timestamp
1246 rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))
1248 if len(rejects) > 0:
1249 return (None, rejects)
1251 # Next check gpgv exited with a zero return code
1253 rejects.append("gpgv failed while checking %s." % (sig_filename))
1255 rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
1257 rejects.append(prefix_multi_line_string(output, " [GPG output:] "), "")
1258 return (None, rejects)
1260 # Sanity check the good stuff we expect
1261 if not keywords.has_key("VALIDSIG"):
1262 rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
1264 args = keywords["VALIDSIG"]
1266 rejects.append("internal error while checking signature on %s." % (sig_filename))
1268 fingerprint = args[0]
1269 if not keywords.has_key("GOODSIG"):
1270 rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
1271 if not keywords.has_key("SIG_ID"):
1272 rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))
1274 # Finally ensure there's not something we don't recognise
1275 known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
1276 SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
1277 NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")
1279 for keyword in keywords.keys():
1280 if not known_keywords.has_key(keyword):
1281 rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
1283 if len(rejects) > 0:
1284 return (None, rejects)
1286 return (fingerprint, [])
1288 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """retrieve email addresses from gpg key uids for a given fingerprint"""
    # Consult the module-level cache first; fingerprints are stable, so a
    # previously computed address set can be returned as-is.
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses is not None:
        return addresses
    addresses = set()
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
           % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    if result == 0:
        # Scrape the uid lines of the human-readable gpg output; re_gpg_uid
        # extracts the email address portion of each uid.
        for l in output.split('\n'):
            m = re_gpg_uid.match(l)
            if m:
                addresses.add(m.group(1))
    # Cache the (possibly empty) result so failed lookups are not retried.
    key_uid_email_cache[fingerprint] = addresses
    return addresses
1307 ################################################################################
1309 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """
    Greedily word-wrap 'paragraph' so no line exceeds 'max_length' characters.

    Every emitted line ends with a newline, and each line after the first is
    preceded by 'prefix'.  A single word longer than 'max_length' is emitted
    on a line of its own.  An empty/whitespace-only paragraph yields "".
    """
    s = ""
    line = ""
    for word in paragraph.split():
        word_size = len(word)
        if word_size > max_length:
            # Flush any partial line first, then give the oversized word a
            # line of its own.  'line' must be reset here, otherwise the
            # flushed text would be emitted again on the next flush.
            if line:
                s += line + '\n' + prefix
                line = ""
            s += word + '\n' + prefix
        elif not line:
            line = word
        elif len(line) + word_size + 1 > max_length:
            # Word doesn't fit on the current line; start a new one with it.
            s += line + '\n' + prefix
            line = word
        else:
            line += ' ' + word
    # Flush whatever is left on the final line.
    if line:
        s += line + '\n'
    return s
1340 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
    Returns fixed 'src'
    """
    # Strip the archive root off both endpoints (first occurrence only).
    stripped_src = src.replace(root, '', 1)
    link_dir = os.path.dirname(dest.replace(root, '', 1))
    # One '../' hop per path component of the directory containing the link.
    ups = '../' * len(link_dir.split('/'))
    return ups + stripped_src
1353 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.
    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pair (fd, name).
    """
    # Note mkstemp's argument order: (suffix, prefix, dir).
    fd_and_name = tempfile.mkstemp(suffix, prefix, directory)
    return fd_and_name
1367 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Return a secure and unique directory by pre-creating it.
    If 'parent' is non-null, it will be the directory the directory is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pathname to the new directory
    """
    # mkdtemp pre-creates the directory with mode 0700; argument order is
    # (suffix, prefix, dir).
    new_dir = tempfile.mkdtemp(suffix, prefix, parent)
    return new_dir
1381 ################################################################################
def is_email_alias(email):
    """ checks if the user part of the email is listed in the alias file """
    global alias_cache
    # Lazily build the cache on first use; the alias file is read only once
    # per process.
    if alias_cache is None:
        aliasfn = which_alias_file()
        alias_cache = set()
        if aliasfn:
            # /etc/aliases format: "name: target" -- only the name matters.
            for l in open(aliasfn):
                alias_cache.add(l.split(':')[0])
    uid = email.split('@')[0]
    return uid in alias_cache
1395 ################################################################################
def get_changes_files(dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place)

    Returns a list of filenames
    """
    try:
        # Much of the rest of p-u/p-a depends on being in the right place
        os.chdir(dir)
        changes_files = [x for x in os.listdir(dir) if x.endswith('.changes')]
    except OSError as e:
        # fubar() prints the error and exits; execution does not continue.
        fubar("Failed to read list from directory %s (%s)" % (dir, e))

    return changes_files
1414 ################################################################################
# Module initialisation: parse the dak configuration once at import time so
# every importer shares the same apt_pkg configuration object.
Cnf = apt_pkg.newConfiguration()
apt_pkg.ReadConfigFileISC(Cnf,default_config)

# If which_conf_file() points at a host-specific config, layer it on top of
# the default one (later reads override earlier values).
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1424 ###############################################################################