2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
41 import email as modemail
43 from dbconn import DBConn, get_architecture, get_component, get_suite
44 from dak_exceptions import *
45 from textutils import fix_maintainer
46 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
47 re_multi_line_field, re_srchasver, re_verwithext, \
48 re_parse_maintainer, re_taint_free, re_gpg_uid, re_re_mark, \
51 ################################################################################
# Module-level defaults and caches.
# NOTE(review): this listing is elided — original line numbers are embedded in
# each line and some lines are missing; confirm against the full source file.
53 default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
54 default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used
56 alias_cache = None #: Cache for email alias checks
57 key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
59 # (hashname, function, earliest_changes_version)
60 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
61 ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
63 ################################################################################
# html_escape(s): substitute HTML-special characters using the html_escaping
# table keyed by re_html_escaping matches.
# NOTE(review): the `def` line is missing from this elided listing — confirm
# the exact signature against the full source.
66     """ Escape html chars """
67     return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
69 ################################################################################
# open_file: open a file, converting IOError into CantOpenError (per the
# @raise line below).
# NOTE(review): elided listing — the try/except/return lines are missing here;
# verify control flow against the full source.
71 def open_file(filename, mode='r'):
73     Open C{file}, return fileobject.
75     @type filename: string
76     @param filename: path/filename to open
79     @param mode: open mode
82     @return: open fileobject
84     @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
88         f = open(filename, mode)
90         raise CantOpenError, filename
93 ################################################################################
# our_raw_input: prompt on stdout; the visible stderr write handles a ^D
# (EOF) user interrupt.  NOTE(review): elided listing — the actual input
# read and return are not visible; confirm against the full source.
95 def our_raw_input(prompt=""):
97     sys.stdout.write(prompt)
103         sys.stderr.write("\nUser interrupt (^D).\n")
106 ################################################################################
# extract_component_from_section: if the section contains '/', the component
# is the part before it; a Cnf "Component::<section>" entry can expand a
# default component.  Returns (section, component).
# NOTE(review): elided listing — lines missing; verify against full source.
108 def extract_component_from_section(section):
111     if section.find('/') != -1:
112         component = section.split('/')[0]
114     # Expand default component
116     if Cnf.has_key("Component::%s" % section):
121     return (section, component)
123 ################################################################################
# parse_deb822: parse RFC822/deb822-style content into a dict of fields,
# optionally enforcing PGP signing layout depending on signing_rules
# (see parse_changes below for the documented meanings of -1/0/1).
# Raises ParseChangesError on malformed input and InvalidDscError when
# signing_rules == 1 layout checks fail.
# NOTE(review): elided listing — many control-flow lines are missing
# (loop index updates, inside_signature toggling); do not infer exact
# behaviour from this excerpt alone.
125 def parse_deb822(contents, signing_rules=0):
129     # Split the lines in the input, keeping the linebreaks.
130     lines = contents.splitlines(True)
133         raise ParseChangesError, "[Empty changes file]"
135     # Reindex by line number so we can easily verify the format of
141         indexed_lines[index] = line[:-1]
145     num_of_lines = len(indexed_lines.keys())
148     while index < num_of_lines:
150         line = indexed_lines[index]
152         if signing_rules == 1:
154             if index > num_of_lines:
155                 raise InvalidDscError, index
156             line = indexed_lines[index]
157             if not line.startswith("-----BEGIN PGP SIGNATURE"):
158                 raise InvalidDscError, index
163         if line.startswith("-----BEGIN PGP SIGNATURE"):
165         if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
167             if signing_rules == 1:
168                 while index < num_of_lines and line != "":
170                     line = indexed_lines[index]
172         # If we're not inside the signed data, don't process anything
173         if signing_rules >= 0 and not inside_signature:
175         slf = re_single_line_field.match(line)
177             field = slf.groups()[0].lower()
178             changes[field] = slf.groups()[1]
182             changes[field] += '\n'
184         mlf = re_multi_line_field.match(line)
187                 raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
188             if first == 1 and changes[field] != "":
189                 changes[field] += '\n'
191             changes[field] += mlf.groups()[0] + '\n'
195     if signing_rules == 1 and inside_signature:
196         raise InvalidDscError, index
198     changes["filecontents"] = "".join(lines)
200     if changes.has_key("source"):
201         # Strip the source version in brackets from the source field,
202         # put it in the "source-version" field instead.
203         srcver = re_srchasver.search(changes["source"])
205         changes["source"] = srcver.group(1)
206         changes["source-version"] = srcver.group(2)
209         raise ParseChangesError, error
213 ################################################################################
# parse_changes: read a .changes file, require valid UTF-8 (else
# ChangesUnicodeError), then delegate parsing to parse_deb822.
# NOTE(review): elided listing — the try/except around unicode() is not
# fully visible; confirm against the full source.
215 def parse_changes(filename, signing_rules=0):
217     Parses a changes file and returns a dictionary where each field is a
218     key.  The mandatory first argument is the filename of the .changes
221     signing_rules is an optional argument:
223       - If signing_rules == -1, no signature is required.
224       - If signing_rules == 0 (the default), a signature is required.
225       - If signing_rules == 1, it turns on the same strict format checking
228     The rules for (signing_rules == 1)-mode are:
230       - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
231         followed by any PGP header data and must end with a blank line.
233       - The data section must end with a blank line and must be followed by
234         "-----BEGIN PGP SIGNATURE-----".
237     changes_in = open_file(filename)
238     content = changes_in.read()
241         unicode(content, 'utf-8')
243         raise ChangesUnicodeError, "Changes file not proper utf-8"
244     return parse_deb822(content, signing_rules)
246 ################################################################################
def hash_key(hashname):
    """Return the files-dict key under which the given hash is stored.

    e.g. "md5" -> "md5sum", "sha256" -> "sha256sum".

    Reconstructed: the elided listing embedded original line numbers in the
    code, which made this block a syntax error; logic is otherwise unchanged.
    """
    return '%ssum' % hashname
251 ################################################################################
# create_hash: for each file in `files`, open it and store the computed
# hash under hash_key(hashname); unreadable files add a rejection message.
# Returns the rejection-message list (per the rejmsg usage below).
# NOTE(review): elided listing — try/return lines missing; verify.
253 def create_hash(where, files, hashname, hashfunc):
255     create_hash extends the passed files dict with the given hash by
256     iterating over all files on disk and passing them to the hashing
261     for f in files.keys():
263             file_handle = open_file(f)
264         except CantOpenError:
265             rejmsg.append("Could not open file %s for checksumming" % (f))
268         files[f][hash_key(hashname)] = hashfunc(file_handle)
273 ################################################################################
# check_hash: verify the stored hash of each file against the on-disk
# content; missing hash entries and mismatches are reported via rejmsg.
# Files that cannot be opened are tolerated (see the pool TODO below).
# NOTE(review): elided listing — try/finally/close and return lines are
# missing here; confirm resource handling against the full source.
275 def check_hash(where, files, hashname, hashfunc):
277     check_hash checks the given hash in the files dict against the actual
278     files on disk.  The hash values need to be present consistently in
279     all file entries.  It does not modify its input in any way.
283     for f in files.keys():
287             file_handle = open_file(f)
289             # Check for the hash entry, to not trigger a KeyError.
290             if not files[f].has_key(hash_key(hashname)):
291                 rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
295             # Actually check the hash for correctness.
296             if hashfunc(file_handle) != files[f][hash_key(hashname)]:
297                 rejmsg.append("%s: %s check failed in %s" % (f, hashname,
299         except CantOpenError:
300             # TODO: This happens when the file is in the pool.
301             # warn("Cannot open file %s" % f)
308 ################################################################################
# check_size: compare each entry's declared "size" field with the actual
# file size from os.stat; mismatches are reported via rejmsg.
# NOTE(review): elided listing — the os.stat call and OSError handling
# lines are missing; verify against the full source.
310 def check_size(where, files):
312     check_size checks the file sizes in the passed files dict against the
317     for f in files.keys():
322             # TODO: This happens when the file is in the pool.
326         actual_size = entry[stat.ST_SIZE]
327         size = int(files[f]["size"])
328         if size != actual_size:
329             rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
330                           % (f, actual_size, size, where))
333 ################################################################################
# check_hash_fields: reject any "checksums-<name>" field in the manifest
# whose <name> is not one of the hashes listed in known_hashes.
# NOTE(review): elided listing — rejmsg initialisation/return missing.
335 def check_hash_fields(what, manifest):
337     check_hash_fields ensures that there are no checksum fields in the
338     given dict that we do not know about.
342     hashes = map(lambda x: x[0], known_hashes)
343     for field in manifest:
344         if field.startswith("checksums-"):
345             hashname = field.split("-",1)[1]
346             if hashname not in hashes:
347                 rejmsg.append("Unsupported checksum field for %s "\
348                     "in %s" % (hashname, what))
351 ################################################################################
# _ensure_changes_hash: if the .changes Format version is new enough to
# carry this hash, import it from the changes via parse_checksums;
# otherwise fall back to computing it (the `func` call below — presumably
# create_hash or check_hash, chosen in elided lines; TODO confirm).
353 def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
354     if format >= version:
355         # The version should contain the specified hash.
358         # Import hashes from the changes
359         rejmsg = parse_checksums(".changes", files, changes, hashname)
363         # We need to calculate the hash because it can't possibly
366         return func(".changes", files, hashname, hashfunc)
368 # We could add the orig which might be in the pool to the files dict to
369 # access the checksums easily.
# _ensure_dsc_hash: import a present Checksums-<hash> field from the dsc
# and verify it via check_hash; absent fields are skipped (early return in
# an elided line, presumably — TODO confirm).
371 def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
373     ensure_dsc_hashes' task is to ensure that each and every *present* hash
374     in the dsc is correct, i.e. identical to the changes file and if necessary
375     the pool.  The latter task is delegated to check_hash.
379     if not dsc.has_key('Checksums-%s' % (hashname,)):
381     # Import hashes from the dsc
382     parse_checksums(".dsc", dsc_files, dsc, hashname)
384     rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
387 ################################################################################
389 def parse_checksums(where, files, manifest, hashname):
391 field = 'checksums-%s' % hashname
392 if not field in manifest:
394 for line in manifest[field].split('\n'):
397 checksum, size, checkfile = line.strip().split(' ')
398 if not files.has_key(checkfile):
399 # TODO: check for the file's entry in the original files dict, not
400 # the one modified by (auto)byhand and other weird stuff
401 # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
402 # (file, hashname, where))
404 if not files[checkfile]["size"] == size:
405 rejmsg.append("%s: size differs for files and checksums-%s entry "\
406 "in %s" % (checkfile, hashname, where))
408 files[checkfile][hash_key(hashname)] = checksum
409 for f in files.keys():
410 if not files[f].has_key(hash_key(hashname)):
411 rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
415 ################################################################################
417 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
# build_file_list: parse the Files: (or Checksums-*) field of a changes/dsc
# dict into a files dict keyed by filename, validating the Format version
# first (changes accept (1,0)/(0,0); dsc accepts (1,5)..(1,8); non-"files"
# fields need >= (1,8)).  Raises NoFilesFieldError / UnknownFormatError /
# ParseChangesError.  NOTE(review): elided listing — the is_a_dsc branch
# structure and several guard lines are missing; verify before relying.
419 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
422     # Make sure we have a Files: field to parse...
423     if not changes.has_key(field):
424         raise NoFilesFieldError
426     # Make sure we recognise the format of the Files: field
427     format = re_verwithext.search(changes.get("format", "0.0"))
429         raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
431     format = format.groups()
432     if format[1] == None:
433         format = int(float(format[0])), 0, format[2]
435         format = int(format[0]), int(format[1]), format[2]
436     if format[2] == None:
440         # format = (1,0) are the only formats we currently accept,
441         # format = (0,0) are missing format headers of which we still
442         # have some in the archive.
443         if format != (1,0) and format != (0,0):
444             raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
446         if (format < (1,5) or format > (1,8)):
447             raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
448         if field != "files" and format < (1,8):
449             raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
451     includes_section = (not is_a_dsc) and field == "files"
453     # Parse each entry/line:
454     for i in changes[field].split('\n'):
458         section = priority = ""
461             (md5, size, section, priority, name) = s
463             (md5, size, name) = s
465             raise ParseChangesError, i
472             (section, component) = extract_component_from_section(section)
474         files[name] = Dict(size=size, section=section,
475                            priority=priority, component=component)
476         files[name][hashname] = md5
480 ################################################################################
# send_mail: deliver a mail via Dinstall::SendmailCommand.  If a message
# string is given it is written to a tempfile first.  When
# Dinstall::MailWhiteList is configured, recipients in To/Bcc/Cc are
# filtered against the whitelist (regex lines marked by re_re_mark,
# otherwise literal matches); if no recipient survives, the mail is
# dropped and the tempfile unlinked.  Raises SendmailFailedError when the
# sendmail command exits non-zero.
# NOTE(review): elided listing — the whitelist matching loop, tempfile
# cleanup and several close() calls are missing; confirm behaviour (and
# the 0700 open of the rewritten file) against the full source.
482 def send_mail (message, filename=""):
483     """sendmail wrapper, takes _either_ a message string or a file as arguments"""
485     # If we've been passed a string dump it into a temporary file
487         (fd, filename) = tempfile.mkstemp()
488         os.write (fd, message)
491     if Cnf.has_key("Dinstall::MailWhiteList") and \
492        Cnf["Dinstall::MailWhiteList"] != "":
493         message_in = open_file(filename)
494         message_raw = modemail.message_from_file(message_in)
498         whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
500             for line in whitelist_in:
501                 if not re_whitespace_comment.match(line):
502                     if re_re_mark.match(line):
503                         whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
505                         whitelist.append(re.compile(re.escape(line.strip())))
510         fields = ["To", "Bcc", "Cc"]
513             value = message_raw.get(field, None)
516                 for item in value.split(","):
517                     (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
523                     if not mail_whitelisted:
524                         print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])
528                     # Doesn't have any mail in whitelist so remove the header
530                     del message_raw[field]
532                     message_raw.replace_header(field, string.join(match, ", "))
534         # Change message fields in order if we don't have a To header
535         if not message_raw.has_key("To"):
538                 if message_raw.has_key(field):
539                     message_raw[fields[-1]] = message_raw[field]
540                     del message_raw[field]
543                 # Clean up any temporary files
544                 # and return, as we removed all recipients.
546                     os.unlink (filename);
549         fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
550         os.write (fd, message_raw.as_string(True));
554     (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
556         raise SendmailFailedError, output
558     # Clean up any temporary files
562 ################################################################################
# poolify: build the pool sub-path for a source package: "lib"-prefixed
# packages use the first four characters as the directory, others the
# first character.  NOTE(review): elided listing — the component handling
# lines are missing; verify against the full source.
564 def poolify (source, component):
567     if source[:3] == "lib":
568         return component + source[:4] + '/' + source + '/'
570         return component + source[:1] + '/' + source + '/'
572 ################################################################################
# move: copy src to dest (creating dest's directory 02775 under a cleared
# umask if needed; a directory dest gets the src basename appended), then
# chmod to `perms`.  Refuses to overwrite unless `overwrite` (guard lines
# elided) and fubar()s on unwritable existing targets.
# NOTE(review): elided listing — the os.unlink(src) that makes this a move
# rather than a copy is not visible; confirm against the full source.
574 def move (src, dest, overwrite = 0, perms = 0664):
575     if os.path.exists(dest) and os.path.isdir(dest):
578         dest_dir = os.path.dirname(dest)
579     if not os.path.exists(dest_dir):
580         umask = os.umask(00000)
581         os.makedirs(dest_dir, 02775)
583     #print "Moving %s to %s..." % (src, dest)
584     if os.path.exists(dest) and os.path.isdir(dest):
585         dest += '/' + os.path.basename(src)
586     # Don't overwrite unless forced to
587     if os.path.exists(dest):
589             fubar("Can't move %s to %s - file already exists." % (src, dest))
591             if not os.access(dest, os.W_OK):
592                 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
593     shutil.copy2(src, dest)
594     os.chmod(dest, perms)
# copy: same structure as move() above but raises FileExistsError /
# CantOverwriteError instead of fubar()ing, and leaves src in place.
# NOTE(review): elided listing — guard/else lines missing; verify.
597 def copy (src, dest, overwrite = 0, perms = 0664):
598     if os.path.exists(dest) and os.path.isdir(dest):
601         dest_dir = os.path.dirname(dest)
602     if not os.path.exists(dest_dir):
603         umask = os.umask(00000)
604         os.makedirs(dest_dir, 02775)
606     #print "Copying %s to %s..." % (src, dest)
607     if os.path.exists(dest) and os.path.isdir(dest):
608         dest += '/' + os.path.basename(src)
609     # Don't overwrite unless forced to
610     if os.path.exists(dest):
612             raise FileExistsError
614             if not os.access(dest, os.W_OK):
615                 raise CantOverwriteError
616     shutil.copy2(src, dest)
617     os.chmod(dest, perms)
619 ################################################################################
# Fragment: look up a per-host "Config::<host>::DatabaseHostname" setting
# and return it if present.  NOTE(review): the enclosing `def` line is
# missing from this elided listing — confirm the function name/signature.
622     res = socket.gethostbyaddr(socket.gethostname())
623     database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
624     if database_hostname:
625         return database_hostname
# which_conf_file: pick the dak config file for this host — a per-user
# local config if AllowLocalConfig permits and $HOME/etc/dak.conf exists
# (NOTE(review): os.path.join(homedir, "/etc/dak.conf") discards homedir
# because the second arg is absolute — looks like an upstream bug, and it
# then reads default_config anyway; confirm), else a per-host
# Config::<host>::DakConfig entry, else default_config.
629 def which_conf_file ():
630     res = socket.gethostbyaddr(socket.gethostname())
631     # In case we allow local config files per user, try if one exists
632     if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
633         homedir = os.getenv("HOME")
634         confpath = os.path.join(homedir, "/etc/dak.conf")
635         if os.path.exists(confpath):
636             apt_pkg.ReadConfigFileISC(Cnf,default_config)
638     # We are still in here, so there is no local config file or we do
639     # not allow local files. Do the normal stuff.
640     if Cnf.get("Config::" + res[0] + "::DakConfig"):
641         return Cnf["Config::" + res[0] + "::DakConfig"]
643     return default_config
# which_apt_conf_file: same host-specific lookup pattern as
# which_conf_file above, but for Config::<host>::AptConfig with
# default_apt_config as the fallback (and the same suspicious
# os.path.join-with-absolute-path local-config check).
645 def which_apt_conf_file ():
646     res = socket.gethostbyaddr(socket.gethostname())
647     # In case we allow local config files per user, try if one exists
648     if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
649         homedir = os.getenv("HOME")
650         confpath = os.path.join(homedir, "/etc/dak.conf")
651         if os.path.exists(confpath):
652             apt_pkg.ReadConfigFileISC(Cnf,default_config)
654     if Cnf.get("Config::" + res[0] + "::AptConfig"):
655         return Cnf["Config::" + res[0] + "::AptConfig"]
657     return default_apt_config
# which_alias_file: return the per-host forward-alias file path if it
# exists.  NOTE(review): elided listing — the return/else lines are
# missing; presumably returns None otherwise, confirm in full source.
659 def which_alias_file():
660     hostname = socket.gethostbyaddr(socket.gethostname())[0]
661     aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
662     if os.path.exists(aliasfn):
667 ################################################################################
# TemplateSubst: read a template file and substitute each key of `map`
# with its value.  NOTE(review): elided listing — the loop over map keys,
# file close and return are missing; verify against the full source.
669 def TemplateSubst(map, filename):
670     """ Perform a substition of template """
671     templatefile = open_file(filename)
672     template = templatefile.read()
674         template = template.replace(x,map[x])
678 ################################################################################
# fubar: print a fatal error to stderr (and, per the elided lines,
# presumably sys.exit(exit_code) — confirm).  The "W:" write below belongs
# to warn(), whose `def` line is elided from this listing.
680 def fubar(msg, exit_code=1):
681     sys.stderr.write("E: %s\n" % (msg))
685     sys.stderr.write("W: %s\n" % (msg))
687 ################################################################################
689 # Returns the user name with a laughable attempt at rfc822 conformancy
690 # (read: removing stray periods).
# NOTE(review): two elided functions here — the first returns the GECOS
# real name with '.' stripped, the second the login name; their `def`
# lines are missing from this listing.
692     return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
695     return pwd.getpwuid(os.getuid())[0]
697 ################################################################################
707 return ("%d%s" % (c, t))
709 ################################################################################
# cc_fix_changes: convert the space-separated "architecture" string of a
# parsed changes dict into a dict of arch -> 1 (set-like membership used
# by changes_compare below).  NOTE(review): elided listing — the split()
# and loop lines are partially missing; verify.
711 def cc_fix_changes (changes):
712     o = changes.get("architecture", "")
714         del changes["architecture"]
715     changes["architecture"] = {}
717         changes["architecture"][j] = 1
# changes_compare: cmp()-style comparator for two .changes filenames —
# parses both, then orders by source name, then version
# (apt_pkg.VersionCompare), then presence of source, with filename as the
# final tie-break.  NOTE(review): elided listing — the exception handling
# around parse_changes and the early `return q` lines are missing.
719 def changes_compare (a, b):
720     """ Sort by source name, source version, 'have source', and then by filename """
722         a_changes = parse_changes(a)
727         b_changes = parse_changes(b)
731     cc_fix_changes (a_changes)
732     cc_fix_changes (b_changes)
734     # Sort by source name
735     a_source = a_changes.get("source")
736     b_source = b_changes.get("source")
737     q = cmp (a_source, b_source)
741     # Sort by source version
742     a_version = a_changes.get("version", "0")
743     b_version = b_changes.get("version", "0")
744     q = apt_pkg.VersionCompare(a_version, b_version)
748     # Sort by 'have source'
749     a_has_source = a_changes["architecture"].get("source")
750     b_has_source = b_changes["architecture"].get("source")
751     if a_has_source and not b_has_source:
753     elif b_has_source and not a_has_source:
756     # Fall back to sort by filename
759 ################################################################################
# find_next_free: append ".1", ".2", ... to dest until a non-existing
# path is found; raise NoFreeFilenameError after `too_many` attempts.
# NOTE(review): elided listing — extra initialisation/increment and the
# final return are missing; verify against the full source.
761 def find_next_free (dest, too_many=100):
764     while os.path.exists(dest) and extra < too_many:
765         dest = orig_dest + '.' + repr(extra)
767     if extra >= too_many:
768         raise NoFreeFilenameError
771 ################################################################################
# result_join: join a sequence with `sep`, mapping None entries to "".
# NOTE(review): elided listing — resultlist initialisation and the else
# branch line are missing; verify against the full source.
773 def result_join (original, sep = '\t'):
775     for i in xrange(len(original)):
776         if original[i] == None:
777             resultlist.append("")
779             resultlist.append(original[i])
780     return sep.join(resultlist)
782 ################################################################################
# prefix_multi_line_string: prepend `prefix` to each line of `str`
# (skipping blank lines unless include_blank_lines), stripping the
# trailing newline per the comment below.  NOTE(review): `str` shadows
# the builtin; out-initialisation and return lines are elided.
784 def prefix_multi_line_string(str, prefix, include_blank_lines=0):
786     for line in str.split('\n'):
788         if line or include_blank_lines:
789             out += "%s%s\n" % (prefix, line)
790     # Strip trailing new line
795 ################################################################################
# validate_changes_file_arg: normalise a .dak argument to its .changes
# twin, check type/readability, then honour require_changes (-1 ignore /
# 0 warn+None / 1 fubar) as documented in the docstring below.
# NOTE(review): elided listing — the success-path returns are missing.
797 def validate_changes_file_arg(filename, require_changes=1):
799     'filename' is either a .changes or .dak file.  If 'filename' is a
800     .dak file, it's changed to be the corresponding .changes file.  The
801     function then checks if the .changes file a) exists and b) is
802     readable and returns the .changes filename if so.  If there's a
803     problem, the next action depends on the option 'require_changes'
806       - If 'require_changes' == -1, errors are ignored and the .changes
807                                     filename is returned.
808       - If 'require_changes' == 0, a warning is given and 'None' is returned.
809       - If 'require_changes' == 1, a fatal error is raised.
814     orig_filename = filename
815     if filename.endswith(".dak"):
816         filename = filename[:-4]+".changes"
818     if not filename.endswith(".changes"):
819         error = "invalid file type; not a changes file"
821         if not os.access(filename,os.R_OK):
822             if os.path.exists(filename):
823                 error = "permission denied"
825                 error = "file not found"
828         if require_changes == 1:
829             fubar("%s: %s." % (orig_filename, error))
830         elif require_changes == 0:
831             warn("Skipping %s - %s" % (orig_filename, error))
833         else: # We only care about the .dak file
838 ################################################################################
841 return (arch != "source" and arch != "all")
843 ################################################################################
def join_with_commas_and(list):
    """Join a list into an English enumeration.

    Returns "nothing" for an empty list, the sole element for a
    one-element list, and "a, b and c" style text otherwise.

    The parameter name `list` shadows the builtin but is kept for
    interface compatibility.  Reconstructed: the elided listing embedded
    original line numbers in the code, making the block a syntax error;
    the logic itself is unchanged.
    """
    if len(list) == 0:
        return "nothing"
    if len(list) == 1:
        return list[0]
    return ", ".join(list[:-1]) + " and " + list[-1]
850 ################################################################################
# Fragment of an elided dependency pretty-printer: formats each
# (pkg, version, constraint) atom as "pkg (constraint version)" and joins
# the alternatives with " |".  NOTE(review): the `def` line, the loop
# header and the no-constraint branch are missing from this listing.
855         (pkg, version, constraint) = atom
857             pp_dep = "%s (%s %s)" % (pkg, constraint, version)
860         pp_deps.append(pp_dep)
861     return " |".join(pp_deps)
863 ################################################################################
868 ################################################################################
# parse_args: translate -s/-c/-a Options into SQL "AND ... IN (...)"
# constraint fragments using DB lookups (get_suite / get_component /
# get_architecture); fubar()s when a category yields no valid id.
# Returns (con_suites, con_architectures, con_components, check_source).
# NOTE(review): elided listing — suite_ids_list init, the Suite guard,
# check_source handling for "source", and several else branches are
# missing; confirm against the full source.
870 def parse_args(Options):
871     """ Handle -a, -c and -s arguments; returns them as SQL constraints """
872     # XXX: This should go away and everything which calls it be converted
873     #      to use SQLA properly.  For now, we'll just fix it not to use
874     #      the old Pg interface though
875     session = DBConn().session()
879         for suitename in split_args(Options["Suite"]):
880             suite = get_suite(suitename, session=session)
881             if suite.suite_id is None:
882                 warn("suite '%s' not recognised." % (suite.suite_name))
884                 suite_ids_list.append(suite.suite_id)
886             con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
888             fubar("No valid suite given.")
893     if Options["Component"]:
894         component_ids_list = []
895         for componentname in split_args(Options["Component"]):
896             component = get_component(componentname, session=session)
897             if component is None:
898                 warn("component '%s' not recognised." % (componentname))
900                 component_ids_list.append(component.component_id)
901         if component_ids_list:
902             con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
904             fubar("No valid component given.")
908     # Process architecture
909     con_architectures = ""
911     if Options["Architecture"]:
913         for archname in split_args(Options["Architecture"]):
914             if archname == "source":
917                 arch = get_architecture(archname, session=session)
919                     warn("architecture '%s' not recognised." % (archname))
921                     arch_ids_list.append(arch.arch_id)
923                 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
926                 fubar("No valid architecture given.")
930     return (con_suites, con_architectures, con_components, check_source)
932 ################################################################################
934 # Inspired(tm) by Bryn Keller's print_exc_plus (See
935 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
# NOTE(review): elided debugging helper — walks the traceback and prints
# each frame's locals after the standard traceback; the `def` line, the
# frame-collection loop and the try/except around printing values are
# missing from this listing.
938     tb = sys.exc_info()[2]
947     traceback.print_exc()
949         print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
950                                                frame.f_code.co_filename,
952         for key, value in frame.f_locals.items():
953             print "\t%20s = " % key,
957                 print "<unable to print>"
959 ################################################################################
961 def try_with_debug(function):
969 ################################################################################
# arch_compare_sw: cmp()-style architecture comparator where "source"
# sorts before everything else.  NOTE(review): elided listing — all
# branches except the both-"source" case are missing; verify.
971 def arch_compare_sw (a, b):
973     Function for use in sorting lists of architectures.
975     Sorts normally except that 'source' dominates all others.
978     if a == "source" and b == "source":
987 ################################################################################
# split_args: split an argument string on commas if any are present,
# otherwise on whitespace; with dwim set, a trailing comma is a fatal
# error (see docstring).  NOTE(review): elided listing — the actual
# split()/return lines are missing; verify against the full source.
989 def split_args (s, dwim=1):
991     Split command line arguments which can be separated by either commas
992     or whitespace.  If dwim is set, it will complain about string ending
993     in comma since this usually means someone did 'dak ls -a i386, m68k
994     foo' or something and the inevitable confusion resulting from 'm68k'
995     being treated as an argument is undesirable.
998     if s.find(",") == -1:
1001         if s[-1:] == "," and dwim:
1002             fubar("split_args: found trailing comma, spurious space maybe?")
1005 ################################################################################
1007 def Dict(**dict): return dict
1009 ########################################
# gpgv_get_status_output: fork/exec `cmd` under /bin/sh with pipes for
# stdout/stderr plus a dedicated gpg --status-fd channel; the parent
# select()s over the pipes, accumulating `output` and `status` separately,
# waits for the child and returns (output, status, exit_status).
# NOTE(review): elided listing — the fork, child-side dup2/close setup,
# read-loop bookkeeping and several close() calls are missing; do not
# infer fd-lifecycle details from this excerpt.
1011 def gpgv_get_status_output(cmd, status_read, status_write):
1013     Our very own version of commands.getouputstatus(), hacked to support
1017     cmd = ['/bin/sh', '-c', cmd]
1018     p2cread, p2cwrite = os.pipe()
1019     c2pread, c2pwrite = os.pipe()
1020     errout, errin = os.pipe()
1030         for i in range(3, 256):
1031             if i != status_write:
1037             os.execvp(cmd[0], cmd)
1043     os.dup2(c2pread, c2pwrite)
1044     os.dup2(errout, errin)
1046     output = status = ""
1048         i, o, e = select.select([c2pwrite, errin, status_read], [], [])
1051             r = os.read(fd, 8196)
1053                 more_data.append(fd)
1054                 if fd == c2pwrite or fd == errin:
1056                 elif fd == status_read:
1059                     fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
1061     pid, exit_status = os.waitpid(pid, 0)
1063         os.close(status_write)
1064         os.close(status_read)
1074     return output, status, exit_status
1076 ################################################################################
# process_gpgv_output: parse gpgv --status-fd text into a keyword -> args
# dict, accumulating malformed-line and duplicate-token complaints into
# `internal_error`.  Duplicates of NODATA/SIGEXPIRED/KEYEXPIRED are
# tolerated.  Returns (keywords, internal_error).
# NOTE(review): elided listing — keywords/internal_error initialisation
# and `args = split[2:]`-style lines are missing; verify.
1078 def process_gpgv_output(status):
1079     # Process the status-fd output
1082     for line in status.split('\n'):
1086         split = line.split()
1088             internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
1090         (gnupg, keyword) = split[:2]
1091         if gnupg != "[GNUPG:]":
1092             internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
1095         if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
1096             internal_error += "found duplicate status token ('%s').\n" % (keyword)
1099         keywords[keyword] = args
1101     return (keywords, internal_error)
1103 ################################################################################
# retrieve_key: fetch the key that signed `filename` from a keyserver into
# a keyring.  Runs gpgv against /dev/null to extract the NO_PUBKEY
# fingerprint from status-fd output, then gpg --recv-key's it.  Returns an
# error string on failure (and, per the elided tail, presumably None on
# success — confirm).  Defaults come from Dinstall::KeyServer and the
# first Dinstall::GPGKeyring entry.
# NOTE(review): elided listing — the keyserver/keyring default guards and
# the success return are missing; verify against the full source.
1105 def retrieve_key (filename, keyserver=None, keyring=None):
1107     Retrieve the key that signed 'filename' from 'keyserver' and
1108     add it to 'keyring'.  Returns nothing on success, or an error message
1112     # Defaults for keyserver and keyring
1114         keyserver = Cnf["Dinstall::KeyServer"]
1116         keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
1118     # Ensure the filename contains no shell meta-characters or other badness
1119     if not re_taint_free.match(filename):
1120         return "%s: tainted filename" % (filename)
1122     # Invoke gpgv on the file
1123     status_read, status_write = os.pipe()
1124     cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1125     (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1127     # Process the status-fd output
1128     (keywords, internal_error) = process_gpgv_output(status)
1130         return internal_error
1132     if not keywords.has_key("NO_PUBKEY"):
1133         return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1135     fingerprint = keywords["NO_PUBKEY"][0]
1136     # XXX - gpg sucks.  You can't use --secret-keyring=/dev/null as
1137     # it'll try to create a lockfile in /dev.  A better solution might
1138     # be a tempfile or something.
1139     cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1140           % (Cnf["Dinstall::SigningKeyring"])
1141     cmd += " --keyring %s --keyserver %s --recv-key %s" \
1142            % (keyring, keyserver, fingerprint)
1143     (result, output) = commands.getstatusoutput(cmd)
1145         return "'%s' failed with exit code %s" % (cmd, result)
1149 ################################################################################
# gpg_keyring_args: build a "--keyring X --keyring Y ..." argument string,
# defaulting to Dinstall::GPGKeyring when no keyrings are passed.
# NOTE(review): elided listing — the `if keyrings is None:` guard line is
# missing; verify against the full source.
1151 def gpg_keyring_args(keyrings=None):
1153         keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1155     return " ".join(["--keyring %s" % x for x in keyrings])
1157 ################################################################################
# check_signature: validate the PGP signature on sig_filename (optionally
# detached over data_filename) via gpgv, optionally auto-fetching the
# signing key (retrieve_key) per Dinstall::KeyAutoFetch.  Inspects the
# parsed status-fd keywords for the documented failure tokens, then
# sanity-checks VALIDSIG/GOODSIG/SIG_ID and unknown tokens.  Returns
# (fingerprint-or-None, rejects).
# NOTE(review): elided listing — several guard lines, the rejects list
# initialisation and the final return are missing, and the function runs
# past the end of this excerpt; confirm the tail against the full source.
# Also note the three-argument rejects.append(...) calls below look like
# leftovers from a reject(msg, prefix) API — likely an upstream bug.
1159 def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None):
1161     Check the signature of a file and return the fingerprint if the
1162     signature is valid or 'None' if it's not.  The first argument is the
1163     filename whose signature should be checked.  The second argument is a
1164     reject function and is called when an error is found.  The reject()
1165     function must allow for two arguments: the first is the error message,
1166     the second is an optional prefix string.  It's possible for reject()
1167     to be called more than once during an invocation of check_signature().
1168     The third argument is optional and is the name of the files the
1169     detached signature applies to.  The fourth argument is optional and is
1170     a *list* of keyrings to use.  'autofetch' can either be None, True or
1171     False.  If None, the default behaviour specified in the config will be
1177     # Ensure the filename contains no shell meta-characters or other badness
1178     if not re_taint_free.match(sig_filename):
1179         rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
1180         return (None, rejects)
1182     if data_filename and not re_taint_free.match(data_filename):
1183         rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
1184         return (None, rejects)
1187         keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1189     # Autofetch the signing key if that's enabled
1190     if autofetch == None:
1191         autofetch = Cnf.get("Dinstall::KeyAutoFetch")
1193         error_msg = retrieve_key(sig_filename)
1195             rejects.append(error_msg)
1196             return (None, rejects)
1198     # Build the command line
1199     status_read, status_write = os.pipe()
1200     cmd = "gpgv --status-fd %s %s %s %s" % (
1201         status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)
1203     # Invoke gpgv on the file
1204     (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)
1206     # Process the status-fd output
1207     (keywords, internal_error) = process_gpgv_output(status)
1209     # If we failed to parse the status-fd output, let's just whine and bail now
1211         rejects.append("internal error while performing signature check on %s." % (sig_filename))
1212         rejects.append(internal_error, "")
1213         rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.", "")
1214         return (None, rejects)
1216     # Now check for obviously bad things in the processed output
1217     if keywords.has_key("KEYREVOKED"):
1218         rejects.append("The key used to sign %s has been revoked." % (sig_filename))
1219     if keywords.has_key("BADSIG"):
1220         rejects.append("bad signature on %s." % (sig_filename))
1221     if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
1222         rejects.append("failed to check signature on %s." % (sig_filename))
1223     if keywords.has_key("NO_PUBKEY"):
1224         args = keywords["NO_PUBKEY"]
1227         rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
1228     if keywords.has_key("BADARMOR"):
1229         rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
1230     if keywords.has_key("NODATA"):
1231         rejects.append("no signature found in %s." % (sig_filename))
1232     if keywords.has_key("EXPKEYSIG"):
1233         args = keywords["EXPKEYSIG"]
1236         rejects.append("Signature made by expired key 0x%s" % (key))
1237     if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
1238         args = keywords["KEYEXPIRED"]
1242             if timestamp.count("T") == 0:
1244                     expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
1246                     expiredate = "unknown (%s)" % (timestamp)
1248                 expiredate = timestamp
1249         rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))
1251     if len(rejects) > 0:
1252         return (None, rejects)
1254     # Next check gpgv exited with a zero return code
1256         rejects.append("gpgv failed while checking %s." % (sig_filename))
1258             rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
1260             rejects.append(prefix_multi_line_string(output, " [GPG output:] "), "")
1261         return (None, rejects)
1263     # Sanity check the good stuff we expect
1264     if not keywords.has_key("VALIDSIG"):
1265         rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
1267         args = keywords["VALIDSIG"]
1269             rejects.append("internal error while checking signature on %s." % (sig_filename))
1271             fingerprint = args[0]
1272     if not keywords.has_key("GOODSIG"):
1273         rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
1274     if not keywords.has_key("SIG_ID"):
1275         rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))
1277     # Finally ensure there's not something we don't recognise
1278     known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
1279                           SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
1280                           NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")
1282     for keyword in keywords.keys():
1283         if not known_keywords.has_key(keyword):
1284             rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
1286 if len(rejects) > 0:
1287 return (None, rejects)
1289 return (fingerprint, [])
1291 ################################################################################
# Look up the e-mail addresses on the uids of the gpg key with the given
# fingerprint, consulting (and filling) the module-level
# key_uid_email_cache so gpg is only invoked once per fingerprint.
# NOTE(review): this dump is missing several source lines (1297-1298,
# 1302, 1305, 1308-1309) -- the cache-hit return, the result-set
# initialisation, the match guard and the final return are presumably
# among them; confirm against the complete file.
1293 def gpg_get_key_addresses(fingerprint):
1294 """retreive email addresses from gpg key uids for a given fingerprint"""
# Cache lookup: a non-None result means we already answered for this key.
1295 addresses = key_uid_email_cache.get(fingerprint)
1296 if addresses != None:
# Shell out to gpg; gpg_keyring_args() expands the configured keyrings.
1299 cmd = "gpg --no-default-keyring %s --fingerprint %s" \
1300 % (gpg_keyring_args(), fingerprint)
1301 (result, output) = commands.getstatusoutput(cmd)
# Scan the human-readable gpg output line by line for uid entries and
# collect the address captured by re_gpg_uid's first group.
1303 for l in output.split('\n'):
1304 m = re_gpg_uid.match(l)
1306 addresses.add(m.group(1))
# Remember the answer for subsequent calls with the same fingerprint.
1307 key_uid_email_cache[fingerprint] = addresses
1310 ################################################################################
1312 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
# Break 'paragraph' into lines of at most 'max_length' characters,
# prefixing continuation lines with 'prefix' (algorithm inspired by the
# zopelabs cookbook link above).
# NOTE(review): this dump is missing several source lines (1315-1317,
# 1319-1320, 1323, 1326-1327, 1331-1342) -- the accumulator/'line'
# initialisation, the per-word loop header, the else branches and the
# final return are presumably among them; confirm against the complete
# file.
1314 def wrap(paragraph, max_length, prefix=""):
1318 words = paragraph.split()
1321 word_size = len(word)
# A word longer than max_length is flushed onto its own line rather than
# being split mid-word.
1322 if word_size > max_length:
1324 s += line + '\n' + prefix
1325 s += word + '\n' + prefix
# The +1 accounts for the space that would join 'word' onto 'line'.
1328 new_length = len(line) + word_size + 1
# Appending the word would overflow: emit the current line first.
1329 if new_length > max_length:
1330 s += line + '\n' + prefix
1343 ################################################################################
def clean_symlink(src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.

    Both paths have the leading 'root' stripped (first occurrence only);
    the result climbs out of dest's directory with one '../' per path
    component, then descends into the stripped src.

    Returns the fixed (relative) 'src'.
    """
    stripped_src = src.replace(root, '', 1)
    stripped_dest_dir = os.path.dirname(dest.replace(root, '', 1))
    # One '../' for every directory component between dest and root.
    ups = '../' * len(stripped_dest_dir.split('/'))
    return ups + stripped_src
1356 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.

    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pair (fd, name).
    """
    # mkstemp pre-creates the file (mode 0600), which is what makes the
    # returned name safe against symlink races.
    (fd, pathname) = tempfile.mkstemp(suffix, prefix, directory)
    return (fd, pathname)
1370 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Return a secure and unique directory by pre-creating it.

    If 'parent' is non-null, it will be the directory the directory is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pathname to the new directory.
    """
    # mkdtemp pre-creates the directory (mode 0700), so the returned path
    # is already owned exclusively by us.
    path = tempfile.mkdtemp(suffix, prefix, parent)
    return path
1384 ################################################################################
1386 def is_email_alias(email):
1387 """ checks if the user part of the email is listed in the alias file """
# Lazily build the module-level alias_cache on first use.
# NOTE(review): lines 1388 and 1391-1392 are missing from this dump --
# presumably the 'global alias_cache' statement, a guard on aliasfn and
# the set() initialisation; confirm against the complete file.
1389 if alias_cache == None:
1390 aliasfn = which_alias_file()
# Each alias-file line looks like 'name: target...'; only the name part
# (everything before the first colon) is cached.
1393 for l in open(aliasfn):
1394 alias_cache.add(l.split(':')[0])
# Compare only the local part (before '@') of the given address.
1395 uid = email.split('@')[0]
1396 return uid in alias_cache
1398 ################################################################################
1400 def get_changes_files(dir):
1402 Takes a directory and lists all .changes files in it (as well as chdir'ing
1403 to the directory; this is due to broken behaviour on the part of p-u/p-a
1404 when you're not in the right place)
1406 Returns a list of filenames
# NOTE(review): lines 1401, 1405, 1407-1408, 1410, 1412 and 1414 are
# missing from this dump -- the docstring delimiters, the os.chdir()
# call and the try/except wrapper around the listing (whose handler is
# line 1413, where 'e' is bound) are presumably among them; confirm
# against the complete file.
1409 # Much of the rest of p-u/p-a depends on being in the right place
1411 changes_files = [x for x in os.listdir(dir) if x.endswith('.changes')]
# fubar() reports the error and aborts; 'e' comes from the (missing)
# except clause.
1413 fubar("Failed to read list from directory %s (%s)" % (dir, e))
1415 return changes_files
1417 ################################################################################
# Module initialisation: load the dak configuration into the global 'Cnf'.
# NOTE(review): lines 1418-1420, 1423 and 1426 are missing from this
# dump, so it cannot be confirmed from here whether these statements run
# at import time or inside an init function.
1421 Cnf = apt_pkg.newConfiguration()
# Read the compiled-in default config first ...
1422 apt_pkg.ReadConfigFileISC(Cnf,default_config)
# ... then layer the site configuration on top, if it is a different file.
1424 if which_conf_file() != default_config:
1425 apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1427 ###############################################################################