2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
41 import email as modemail
43 from dbconn import DBConn, get_architecture, get_component, get_suite
44 from dak_exceptions import *
45 from textutils import fix_maintainer
46 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
47 re_multi_line_field, re_srchasver, re_verwithext, \
48 re_parse_maintainer, re_taint_free, re_gpg_uid, re_re_mark, \
51 ################################################################################
53 default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
54 default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used
56 alias_cache = None #: Cache for email alias checks
57 key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
59 # (hashname, function, earliest_changes_version)
60 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
61 ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
63 ################################################################################
66 """ Escape html chars """
67 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
69 ################################################################################
71 def open_file(filename, mode='r'):
73 Open C{file}, return fileobject.
75 @type filename: string
76 @param filename: path/filename to open
79 @param mode: open mode
82 @return: open fileobject
84 @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
88 f = open(filename, mode)
90 raise CantOpenError, filename
93 ################################################################################
95 def our_raw_input(prompt=""):
97 sys.stdout.write(prompt)
103 sys.stderr.write("\nUser interrupt (^D).\n")
106 ################################################################################
108 def extract_component_from_section(section):
111 if section.find('/') != -1:
112 component = section.split('/')[0]
114 # Expand default component
116 if Cnf.has_key("Component::%s" % section):
121 return (section, component)
123 ################################################################################
125 def parse_deb822(contents, signing_rules=0):
129 # Split the lines in the input, keeping the linebreaks.
130 lines = contents.splitlines(True)
133 raise ParseChangesError, "[Empty changes file]"
135 # Reindex by line number so we can easily verify the format of
141 indexed_lines[index] = line[:-1]
145 num_of_lines = len(indexed_lines.keys())
148 while index < num_of_lines:
150 line = indexed_lines[index]
152 if signing_rules == 1:
154 if index > num_of_lines:
155 raise InvalidDscError, index
156 line = indexed_lines[index]
157 if not line.startswith("-----BEGIN PGP SIGNATURE"):
158 raise InvalidDscError, index
163 if line.startswith("-----BEGIN PGP SIGNATURE"):
165 if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
167 if signing_rules == 1:
168 while index < num_of_lines and line != "":
170 line = indexed_lines[index]
172 # If we're not inside the signed data, don't process anything
173 if signing_rules >= 0 and not inside_signature:
175 slf = re_single_line_field.match(line)
177 field = slf.groups()[0].lower()
178 changes[field] = slf.groups()[1]
182 changes[field] += '\n'
184 mlf = re_multi_line_field.match(line)
187 raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
188 if first == 1 and changes[field] != "":
189 changes[field] += '\n'
191 changes[field] += mlf.groups()[0] + '\n'
195 if signing_rules == 1 and inside_signature:
196 raise InvalidDscError, index
198 changes["filecontents"] = "".join(lines)
200 if changes.has_key("source"):
201 # Strip the source version in brackets from the source field,
202 # put it in the "source-version" field instead.
203 srcver = re_srchasver.search(changes["source"])
205 changes["source"] = srcver.group(1)
206 changes["source-version"] = srcver.group(2)
209 raise ParseChangesError, error
213 ################################################################################
215 def parse_changes(filename, signing_rules=0):
217 Parses a changes file and returns a dictionary where each field is a
218 key. The mandatory first argument is the filename of the .changes
221 signing_rules is an optional argument:
223 - If signing_rules == -1, no signature is required.
224 - If signing_rules == 0 (the default), a signature is required.
225 - If signing_rules == 1, it turns on the same strict format checking
228 The rules for (signing_rules == 1)-mode are:
230 - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
231 followed by any PGP header data and must end with a blank line.
233 - The data section must end with a blank line and must be followed by
234 "-----BEGIN PGP SIGNATURE-----".
237 changes_in = open_file(filename)
238 content = changes_in.read()
241 unicode(content, 'utf-8')
243 raise ChangesUnicodeError, "Changes file not proper utf-8"
244 return parse_deb822(content, signing_rules)
246 ################################################################################
def hash_key(hashname):
    """Return the files-dict key under which the given hash is stored (e.g. "md5" -> "md5sum")."""
    return "{0}sum".format(hashname)
251 ################################################################################
253 def create_hash(where, files, hashname, hashfunc):
255 create_hash extends the passed files dict with the given hash by
256 iterating over all files on disk and passing them to the hashing
261 for f in files.keys():
263 file_handle = open_file(f)
264 except CantOpenError:
265 rejmsg.append("Could not open file %s for checksumming" % (f))
268 files[f][hash_key(hashname)] = hashfunc(file_handle)
273 ################################################################################
275 def check_hash(where, files, hashname, hashfunc):
277 check_hash checks the given hash in the files dict against the actual
278 files on disk. The hash values need to be present consistently in
279 all file entries. It does not modify its input in any way.
283 for f in files.keys():
287 file_handle = open_file(f)
289 # Check for the hash entry, to not trigger a KeyError.
290 if not files[f].has_key(hash_key(hashname)):
291 rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
295 # Actually check the hash for correctness.
296 if hashfunc(file_handle) != files[f][hash_key(hashname)]:
297 rejmsg.append("%s: %s check failed in %s" % (f, hashname,
299 except CantOpenError:
300 # TODO: This happens when the file is in the pool.
301 # warn("Cannot open file %s" % f)
308 ################################################################################
310 def check_size(where, files):
312 check_size checks the file sizes in the passed files dict against the
317 for f in files.keys():
322 # TODO: This happens when the file is in the pool.
326 actual_size = entry[stat.ST_SIZE]
327 size = int(files[f]["size"])
328 if size != actual_size:
329 rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
330 % (f, actual_size, size, where))
333 ################################################################################
335 def check_hash_fields(what, manifest):
337 check_hash_fields ensures that there are no checksum fields in the
338 given dict that we do not know about.
342 hashes = map(lambda x: x[0], known_hashes)
343 for field in manifest:
344 if field.startswith("checksums-"):
345 hashname = field.split("-",1)[1]
346 if hashname not in hashes:
347 rejmsg.append("Unsupported checksum field for %s "\
348 "in %s" % (hashname, what))
351 ################################################################################
353 def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
354 if format >= version:
355 # The version should contain the specified hash.
358 # Import hashes from the changes
359 rejmsg = parse_checksums(".changes", files, changes, hashname)
363 # We need to calculate the hash because it can't possibly
366 return func(".changes", files, hashname, hashfunc)
368 # We could add the orig which might be in the pool to the files dict to
369 # access the checksums easily.
371 def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
373 ensure_dsc_hashes' task is to ensure that each and every *present* hash
374 in the dsc is correct, i.e. identical to the changes file and if necessary
375 the pool. The latter task is delegated to check_hash.
379 if not dsc.has_key('Checksums-%s' % (hashname,)):
381 # Import hashes from the dsc
382 parse_checksums(".dsc", dsc_files, dsc, hashname)
384 rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
387 ################################################################################
389 def parse_checksums(where, files, manifest, hashname):
391 field = 'checksums-%s' % hashname
392 if not field in manifest:
394 for line in manifest[field].split('\n'):
397 clist = line.strip().split(' ')
399 checksum, size, checkfile = clist
401 rejmsg.append("Cannot parse checksum line [%s]" % (line))
403 if not files.has_key(checkfile):
404 # TODO: check for the file's entry in the original files dict, not
405 # the one modified by (auto)byhand and other weird stuff
406 # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
407 # (file, hashname, where))
409 if not files[checkfile]["size"] == size:
410 rejmsg.append("%s: size differs for files and checksums-%s entry "\
411 "in %s" % (checkfile, hashname, where))
413 files[checkfile][hash_key(hashname)] = checksum
414 for f in files.keys():
415 if not files[f].has_key(hash_key(hashname)):
416 rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
420 ################################################################################
422 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
424 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
427 # Make sure we have a Files: field to parse...
428 if not changes.has_key(field):
429 raise NoFilesFieldError
431 # Make sure we recognise the format of the Files: field
432 format = re_verwithext.search(changes.get("format", "0.0"))
434 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
436 format = format.groups()
437 if format[1] == None:
438 format = int(float(format[0])), 0, format[2]
440 format = int(format[0]), int(format[1]), format[2]
441 if format[2] == None:
445 # format = (1,0) are the only formats we currently accept,
446 # format = (0,0) are missing format headers of which we still
447 # have some in the archive.
448 if format != (1,0) and format != (0,0):
449 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
451 if (format < (1,5) or format > (1,8)):
452 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
453 if field != "files" and format < (1,8):
454 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
456 includes_section = (not is_a_dsc) and field == "files"
458 # Parse each entry/line:
459 for i in changes[field].split('\n'):
463 section = priority = ""
466 (md5, size, section, priority, name) = s
468 (md5, size, name) = s
470 raise ParseChangesError, i
477 (section, component) = extract_component_from_section(section)
479 files[name] = Dict(size=size, section=section,
480 priority=priority, component=component)
481 files[name][hashname] = md5
485 ################################################################################
487 def send_mail (message, filename=""):
488 """sendmail wrapper, takes _either_ a message string or a file as arguments"""
490 # If we've been passed a string dump it into a temporary file
492 (fd, filename) = tempfile.mkstemp()
493 os.write (fd, message)
496 if Cnf.has_key("Dinstall::MailWhiteList") and \
497 Cnf["Dinstall::MailWhiteList"] != "":
498 message_in = open_file(filename)
499 message_raw = modemail.message_from_file(message_in)
503 whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
505 for line in whitelist_in:
506 if not re_whitespace_comment.match(line):
507 if re_re_mark.match(line):
508 whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
510 whitelist.append(re.compile(re.escape(line.strip())))
515 fields = ["To", "Bcc", "Cc"]
518 value = message_raw.get(field, None)
521 for item in value.split(","):
522 (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
528 if not mail_whitelisted:
529 print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])
533 # Doesn't have any mail in whitelist so remove the header
535 del message_raw[field]
537 message_raw.replace_header(field, string.join(match, ", "))
539 # Change message fields in order if we don't have a To header
540 if not message_raw.has_key("To"):
543 if message_raw.has_key(field):
544 message_raw[fields[-1]] = message_raw[field]
545 del message_raw[field]
548 # Clean up any temporary files
549 # and return, as we removed all recipients.
551 os.unlink (filename);
554 fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
555 os.write (fd, message_raw.as_string(True));
559 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
561 raise SendmailFailedError, output
563 # Clean up any temporary files
567 ################################################################################
569 def poolify (source, component):
572 if source[:3] == "lib":
573 return component + source[:4] + '/' + source + '/'
575 return component + source[:1] + '/' + source + '/'
577 ################################################################################
579 def move (src, dest, overwrite = 0, perms = 0664):
580 if os.path.exists(dest) and os.path.isdir(dest):
583 dest_dir = os.path.dirname(dest)
584 if not os.path.exists(dest_dir):
585 umask = os.umask(00000)
586 os.makedirs(dest_dir, 02775)
588 #print "Moving %s to %s..." % (src, dest)
589 if os.path.exists(dest) and os.path.isdir(dest):
590 dest += '/' + os.path.basename(src)
591 # Don't overwrite unless forced to
592 if os.path.exists(dest):
594 fubar("Can't move %s to %s - file already exists." % (src, dest))
596 if not os.access(dest, os.W_OK):
597 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
598 shutil.copy2(src, dest)
599 os.chmod(dest, perms)
602 def copy (src, dest, overwrite = 0, perms = 0664):
603 if os.path.exists(dest) and os.path.isdir(dest):
606 dest_dir = os.path.dirname(dest)
607 if not os.path.exists(dest_dir):
608 umask = os.umask(00000)
609 os.makedirs(dest_dir, 02775)
611 #print "Copying %s to %s..." % (src, dest)
612 if os.path.exists(dest) and os.path.isdir(dest):
613 dest += '/' + os.path.basename(src)
614 # Don't overwrite unless forced to
615 if os.path.exists(dest):
617 raise FileExistsError
619 if not os.access(dest, os.W_OK):
620 raise CantOverwriteError
621 shutil.copy2(src, dest)
622 os.chmod(dest, perms)
624 ################################################################################
627 res = socket.gethostbyaddr(socket.gethostname())
628 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
629 if database_hostname:
630 return database_hostname
634 def which_conf_file ():
635 res = socket.gethostbyaddr(socket.gethostname())
636 # In case we allow local config files per user, try if one exists
637 if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
638 homedir = os.getenv("HOME")
639 confpath = os.path.join(homedir, "/etc/dak.conf")
640 if os.path.exists(confpath):
641 apt_pkg.ReadConfigFileISC(Cnf,default_config)
643 # We are still in here, so there is no local config file or we do
644 # not allow local files. Do the normal stuff.
645 if Cnf.get("Config::" + res[0] + "::DakConfig"):
646 return Cnf["Config::" + res[0] + "::DakConfig"]
648 return default_config
650 def which_apt_conf_file ():
651 res = socket.gethostbyaddr(socket.gethostname())
652 # In case we allow local config files per user, try if one exists
653 if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
654 homedir = os.getenv("HOME")
655 confpath = os.path.join(homedir, "/etc/dak.conf")
656 if os.path.exists(confpath):
657 apt_pkg.ReadConfigFileISC(Cnf,default_config)
659 if Cnf.get("Config::" + res[0] + "::AptConfig"):
660 return Cnf["Config::" + res[0] + "::AptConfig"]
662 return default_apt_config
664 def which_alias_file():
665 hostname = socket.gethostbyaddr(socket.gethostname())[0]
666 aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
667 if os.path.exists(aliasfn):
672 ################################################################################
674 def TemplateSubst(map, filename):
675 """ Perform a substition of template """
676 templatefile = open_file(filename)
677 template = templatefile.read()
679 template = template.replace(x, str(map[x]))
683 ################################################################################
685 def fubar(msg, exit_code=1):
686 sys.stderr.write("E: %s\n" % (msg))
690 sys.stderr.write("W: %s\n" % (msg))
692 ################################################################################
694 # Returns the user name with a laughable attempt at rfc822 conformancy
695 # (read: removing stray periods).
697 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
699 ################################################################################
709 return ("%d%s" % (c, t))
711 ################################################################################
713 def cc_fix_changes (changes):
714 o = changes.get("architecture", "")
716 del changes["architecture"]
717 changes["architecture"] = {}
719 changes["architecture"][j] = 1
721 def changes_compare (a, b):
722 """ Sort by source name, source version, 'have source', and then by filename """
724 a_changes = parse_changes(a)
729 b_changes = parse_changes(b)
733 cc_fix_changes (a_changes)
734 cc_fix_changes (b_changes)
736 # Sort by source name
737 a_source = a_changes.get("source")
738 b_source = b_changes.get("source")
739 q = cmp (a_source, b_source)
743 # Sort by source version
744 a_version = a_changes.get("version", "0")
745 b_version = b_changes.get("version", "0")
746 q = apt_pkg.VersionCompare(a_version, b_version)
750 # Sort by 'have source'
751 a_has_source = a_changes["architecture"].get("source")
752 b_has_source = b_changes["architecture"].get("source")
753 if a_has_source and not b_has_source:
755 elif b_has_source and not a_has_source:
758 # Fall back to sort by filename
761 ################################################################################
763 def find_next_free (dest, too_many=100):
766 while os.path.exists(dest) and extra < too_many:
767 dest = orig_dest + '.' + repr(extra)
769 if extra >= too_many:
770 raise NoFreeFilenameError
773 ################################################################################
775 def result_join (original, sep = '\t'):
777 for i in xrange(len(original)):
778 if original[i] == None:
779 resultlist.append("")
781 resultlist.append(original[i])
782 return sep.join(resultlist)
784 ################################################################################
786 def prefix_multi_line_string(str, prefix, include_blank_lines=0):
788 for line in str.split('\n'):
790 if line or include_blank_lines:
791 out += "%s%s\n" % (prefix, line)
792 # Strip trailing new line
797 ################################################################################
799 def validate_changes_file_arg(filename, require_changes=1):
801 'filename' is either a .changes or .dak file. If 'filename' is a
802 .dak file, it's changed to be the corresponding .changes file. The
803 function then checks if the .changes file a) exists and b) is
804 readable and returns the .changes filename if so. If there's a
805 problem, the next action depends on the option 'require_changes'
808 - If 'require_changes' == -1, errors are ignored and the .changes
809 filename is returned.
810 - If 'require_changes' == 0, a warning is given and 'None' is returned.
811 - If 'require_changes' == 1, a fatal error is raised.
816 orig_filename = filename
817 if filename.endswith(".dak"):
818 filename = filename[:-4]+".changes"
820 if not filename.endswith(".changes"):
821 error = "invalid file type; not a changes file"
823 if not os.access(filename,os.R_OK):
824 if os.path.exists(filename):
825 error = "permission denied"
827 error = "file not found"
830 if require_changes == 1:
831 fubar("%s: %s." % (orig_filename, error))
832 elif require_changes == 0:
833 warn("Skipping %s - %s" % (orig_filename, error))
835 else: # We only care about the .dak file
840 ################################################################################
843 return (arch != "source" and arch != "all")
845 ################################################################################
def join_with_commas_and(list):
    """Join the items English-style: commas between all but the last pair, which is joined with " and ".

    An empty list yields the string "nothing"; a single item is returned unchanged.
    """
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    head = ", ".join(list[:-1])
    return "%s and %s" % (head, list[-1])
852 ################################################################################
857 (pkg, version, constraint) = atom
859 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
862 pp_deps.append(pp_dep)
863 return " |".join(pp_deps)
865 ################################################################################
870 ################################################################################
872 def parse_args(Options):
873 """ Handle -a, -c and -s arguments; returns them as SQL constraints """
874 # XXX: This should go away and everything which calls it be converted
875 # to use SQLA properly. For now, we'll just fix it not to use
876 # the old Pg interface though
877 session = DBConn().session()
881 for suitename in split_args(Options["Suite"]):
882 suite = get_suite(suitename, session=session)
883 if suite.suite_id is None:
884 warn("suite '%s' not recognised." % (suite.suite_name))
886 suite_ids_list.append(suite.suite_id)
888 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
890 fubar("No valid suite given.")
895 if Options["Component"]:
896 component_ids_list = []
897 for componentname in split_args(Options["Component"]):
898 component = get_component(componentname, session=session)
899 if component is None:
900 warn("component '%s' not recognised." % (componentname))
902 component_ids_list.append(component.component_id)
903 if component_ids_list:
904 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
906 fubar("No valid component given.")
910 # Process architecture
911 con_architectures = ""
913 if Options["Architecture"]:
915 for archname in split_args(Options["Architecture"]):
916 if archname == "source":
919 arch = get_architecture(archname, session=session)
921 warn("architecture '%s' not recognised." % (archname))
923 arch_ids_list.append(arch.arch_id)
925 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
928 fubar("No valid architecture given.")
932 return (con_suites, con_architectures, con_components, check_source)
934 ################################################################################
936 # Inspired(tm) by Bryn Keller's print_exc_plus (See
937 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
940 tb = sys.exc_info()[2]
949 traceback.print_exc()
951 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
952 frame.f_code.co_filename,
954 for key, value in frame.f_locals.items():
955 print "\t%20s = " % key,
959 print "<unable to print>"
961 ################################################################################
963 def try_with_debug(function):
971 ################################################################################
973 def arch_compare_sw (a, b):
975 Function for use in sorting lists of architectures.
977 Sorts normally except that 'source' dominates all others.
980 if a == "source" and b == "source":
989 ################################################################################
991 def split_args (s, dwim=1):
993 Split command line arguments which can be separated by either commas
994 or whitespace. If dwim is set, it will complain about string ending
995 in comma since this usually means someone did 'dak ls -a i386, m68k
996 foo' or something and the inevitable confusion resulting from 'm68k'
997 being treated as an argument is undesirable.
1000 if s.find(",") == -1:
1003 if s[-1:] == "," and dwim:
1004 fubar("split_args: found trailing comma, spurious space maybe?")
1007 ################################################################################
def Dict(**kwargs):
    """Return the keyword arguments collected as a plain dictionary.

    Renamed the ``**`` collector from ``dict`` to ``kwargs`` so the builtin
    ``dict`` type is not shadowed inside the function body; the change is
    invisible to callers since a ``**`` parameter name is never passed by
    keyword.
    """
    return kwargs
1011 ########################################
1013 def gpgv_get_status_output(cmd, status_read, status_write):
1015 Our very own version of commands.getouputstatus(), hacked to support
1019 cmd = ['/bin/sh', '-c', cmd]
1020 p2cread, p2cwrite = os.pipe()
1021 c2pread, c2pwrite = os.pipe()
1022 errout, errin = os.pipe()
1032 for i in range(3, 256):
1033 if i != status_write:
1039 os.execvp(cmd[0], cmd)
1045 os.dup2(c2pread, c2pwrite)
1046 os.dup2(errout, errin)
1048 output = status = ""
1050 i, o, e = select.select([c2pwrite, errin, status_read], [], [])
1053 r = os.read(fd, 8196)
1055 more_data.append(fd)
1056 if fd == c2pwrite or fd == errin:
1058 elif fd == status_read:
1061 fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
1063 pid, exit_status = os.waitpid(pid, 0)
1065 os.close(status_write)
1066 os.close(status_read)
1076 return output, status, exit_status
1078 ################################################################################
1080 def process_gpgv_output(status):
1081 # Process the status-fd output
1084 for line in status.split('\n'):
1088 split = line.split()
1090 internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
1092 (gnupg, keyword) = split[:2]
1093 if gnupg != "[GNUPG:]":
1094 internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
1097 if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
1098 internal_error += "found duplicate status token ('%s').\n" % (keyword)
1101 keywords[keyword] = args
1103 return (keywords, internal_error)
1105 ################################################################################
1107 def retrieve_key (filename, keyserver=None, keyring=None):
1109 Retrieve the key that signed 'filename' from 'keyserver' and
1110 add it to 'keyring'. Returns nothing on success, or an error message
1114 # Defaults for keyserver and keyring
1116 keyserver = Cnf["Dinstall::KeyServer"]
1118 keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
1120 # Ensure the filename contains no shell meta-characters or other badness
1121 if not re_taint_free.match(filename):
1122 return "%s: tainted filename" % (filename)
1124 # Invoke gpgv on the file
1125 status_read, status_write = os.pipe()
1126 cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1127 (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1129 # Process the status-fd output
1130 (keywords, internal_error) = process_gpgv_output(status)
1132 return internal_error
1134 if not keywords.has_key("NO_PUBKEY"):
1135 return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1137 fingerprint = keywords["NO_PUBKEY"][0]
1138 # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
1139 # it'll try to create a lockfile in /dev. A better solution might
1140 # be a tempfile or something.
1141 cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1142 % (Cnf["Dinstall::SigningKeyring"])
1143 cmd += " --keyring %s --keyserver %s --recv-key %s" \
1144 % (keyring, keyserver, fingerprint)
1145 (result, output) = commands.getstatusoutput(cmd)
1147 return "'%s' failed with exit code %s" % (cmd, result)
1151 ################################################################################
1153 def gpg_keyring_args(keyrings=None):
1155 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1157 return " ".join(["--keyring %s" % x for x in keyrings])
1159 ################################################################################
1161 def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None):
1163 Check the signature of a file and return the fingerprint if the
1164 signature is valid or 'None' if it's not. The first argument is the
1165 filename whose signature should be checked. The second argument is a
1166 reject function and is called when an error is found. The reject()
1167 function must allow for two arguments: the first is the error message,
1168 the second is an optional prefix string. It's possible for reject()
1169 to be called more than once during an invocation of check_signature().
1170 The third argument is optional and is the name of the files the
1171 detached signature applies to. The fourth argument is optional and is
1172 a *list* of keyrings to use. 'autofetch' can either be None, True or
1173 False. If None, the default behaviour specified in the config will be
1179 # Ensure the filename contains no shell meta-characters or other badness
1180 if not re_taint_free.match(sig_filename):
1181 rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
1182 return (None, rejects)
1184 if data_filename and not re_taint_free.match(data_filename):
1185 rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
1186 return (None, rejects)
1189 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1191 # Autofetch the signing key if that's enabled
1192 if autofetch == None:
1193 autofetch = Cnf.get("Dinstall::KeyAutoFetch")
1195 error_msg = retrieve_key(sig_filename)
1197 rejects.append(error_msg)
1198 return (None, rejects)
1200 # Build the command line
1201 status_read, status_write = os.pipe()
1202 cmd = "gpgv --status-fd %s %s %s %s" % (
1203 status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)
1205 # Invoke gpgv on the file
1206 (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)
1208 # Process the status-fd output
1209 (keywords, internal_error) = process_gpgv_output(status)
1211 # If we failed to parse the status-fd output, let's just whine and bail now
1213 rejects.append("internal error while performing signature check on %s." % (sig_filename))
1214 rejects.append(internal_error, "")
1215 rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.", "")
1216 return (None, rejects)
1218 # Now check for obviously bad things in the processed output
1219 if keywords.has_key("KEYREVOKED"):
1220 rejects.append("The key used to sign %s has been revoked." % (sig_filename))
1221 if keywords.has_key("BADSIG"):
1222 rejects.append("bad signature on %s." % (sig_filename))
1223 if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
1224 rejects.append("failed to check signature on %s." % (sig_filename))
1225 if keywords.has_key("NO_PUBKEY"):
1226 args = keywords["NO_PUBKEY"]
1229 rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
1230 if keywords.has_key("BADARMOR"):
1231 rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
1232 if keywords.has_key("NODATA"):
1233 rejects.append("no signature found in %s." % (sig_filename))
1234 if keywords.has_key("EXPKEYSIG"):
1235 args = keywords["EXPKEYSIG"]
1238 rejects.append("Signature made by expired key 0x%s" % (key))
1239 if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
1240 args = keywords["KEYEXPIRED"]
1244 if timestamp.count("T") == 0:
1246 expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
1248 expiredate = "unknown (%s)" % (timestamp)
1250 expiredate = timestamp
1251 rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))
1253 if len(rejects) > 0:
1254 return (None, rejects)
1256 # Next check gpgv exited with a zero return code
1258 rejects.append("gpgv failed while checking %s." % (sig_filename))
1260 rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
1262 rejects.append(prefix_multi_line_string(output, " [GPG output:] "), "")
1263 return (None, rejects)
1265 # Sanity check the good stuff we expect
1266 if not keywords.has_key("VALIDSIG"):
1267 rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
1269 args = keywords["VALIDSIG"]
1271 rejects.append("internal error while checking signature on %s." % (sig_filename))
1273 fingerprint = args[0]
1274 if not keywords.has_key("GOODSIG"):
1275 rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
1276 if not keywords.has_key("SIG_ID"):
1277 rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))
1279 # Finally ensure there's not something we don't recognise
1280 known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
1281 SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
1282 NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")
1284 for keyword in keywords.keys():
1285 if not known_keywords.has_key(keyword):
1286 rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
1288 if len(rejects) > 0:
1289 return (None, rejects)
1291 return (fingerprint, [])
1293 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """
    Retrieve email addresses from gpg key uids for a given fingerprint.

    Results are memoised in the module-level key_uid_email_cache so that
    gpg is invoked at most once per fingerprint.
    """
    addresses = key_uid_email_cache.get(fingerprint)
    # 'is not None' (not '!= None'): an empty cached set is a valid hit.
    if addresses is not None:
        return addresses
    addresses = set()
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
          % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    if result == 0:
        for l in output.split('\n'):
            m = re_gpg_uid.match(l)
            if m:
                addresses.add(m.group(1))
    # Cache even on gpg failure so we don't re-run gpg for a bad fingerprint.
    key_uid_email_cache[fingerprint] = addresses
    return addresses
1312 ################################################################################
1314 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """
    Greedily word-wrap 'paragraph' so no built line exceeds 'max_length'
    characters; continuation lines are indented with 'prefix'.

    Words longer than max_length are emitted on a line of their own
    (they are never split).  Returns the wrapped string.
    """
    line = ""
    s = ""
    have_started = 0
    words = paragraph.split()

    for word in words:
        word_size = len(word)
        if word_size > max_length:
            # Flush the pending line, then put the oversized word on its own line.
            if have_started:
                s += line + '\n' + prefix
            s += word + '\n' + prefix
            # Bug fix: reset the accumulator, otherwise the flushed line's
            # text was emitted a second time by the next wrap.
            line = ""
        else:
            # 'and line' guards the first word after an oversized one, so we
            # don't prepend a stray space to an empty accumulator.
            if have_started and line:
                new_length = len(line) + word_size + 1
                if new_length > max_length:
                    s += line + '\n' + prefix
                    line = word
                else:
                    line += ' ' + word
            else:
                line = word

        have_started = 1

    if have_started:
        s += line

    return s
1345 ################################################################################
1347 def clean_symlink (src, dest, root):
1349 Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
1352 src = src.replace(root, '', 1)
1353 dest = dest.replace(root, '', 1)
1354 dest = os.path.dirname(dest)
1355 new_src = '../' * len(dest.split('/'))
1356 return new_src + src
1358 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.

    If 'directory' is non-null, the file is pre-created inside it.
    If 'prefix' is non-null, the filename starts with it (default "dak").
    If 'suffix' is non-null, the filename ends with it.

    Returns a pair (fd, name) as produced by tempfile.mkstemp.
    """
    # Note mkstemp's argument order: (suffix, prefix, dir).
    fd_and_name = tempfile.mkstemp(suffix, prefix, directory)
    return fd_and_name
1372 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Return a secure and unique directory by pre-creating it.

    If 'parent' is non-null, the directory is pre-created inside it.
    If 'prefix' is non-null, the directory name starts with it (default "dak").
    If 'suffix' is non-null, the directory name ends with it.

    Returns the pathname of the new directory.
    """
    # Note mkdtemp's argument order: (suffix, prefix, dir).
    return tempfile.mkdtemp(suffix, prefix, parent)
1386 ################################################################################
def is_email_alias(email):
    """
    Check whether the user part of 'email' is listed in the alias file.

    The alias file is parsed once and memoised in the module-level
    alias_cache set (one entry per "name:" left-hand side).
    """
    global alias_cache
    # 'is None' (not '== None'): identity test for the unpopulated cache.
    if alias_cache is None:
        aliasfn = which_alias_file()
        alias_cache = set()
        if aliasfn:
            # Close the file deterministically instead of leaking the handle.
            with open(aliasfn) as aliasfile:
                for l in aliasfile:
                    alias_cache.add(l.split(':')[0])
    uid = email.split('@')[0]
    return uid in alias_cache
1400 ################################################################################
def get_changes_files(dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place).

    Returns a list of filenames.
    """
    try:
        # Much of the rest of p-u/p-a depends on being in the right place
        os.chdir(dir)
        changes_files = [x for x in os.listdir(dir) if x.endswith('.changes')]
    except OSError as e:
        # fubar() terminates the program, so the unbound-local below is moot.
        fubar("Failed to read list from directory %s (%s)" % (dir, e))

    return changes_files
1419 ################################################################################
# Module initialisation: build the global dak configuration object at import
# time by parsing the ISC-style config with apt_pkg.
Cnf = apt_pkg.newConfiguration()
apt_pkg.ReadConfigFileISC(Cnf,default_config)

# Layer a host-specific config file on top of /etc/dak/dak.conf when
# which_conf_file() points somewhere else (later values override earlier ones).
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1429 ###############################################################################