2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
40 import email as modemail
43 from dbconn import DBConn, get_architecture, get_component, get_suite
44 from dak_exceptions import *
45 from textutils import fix_maintainer
46 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
47 re_multi_line_field, re_srchasver, re_taint_free, \
48 re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource
50 from srcformats import get_format_from_string
51 from collections import defaultdict
53 ################################################################################
55 default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
56 default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used
58 alias_cache = None #: Cache for email alias checks
59 key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
61 # (hashname, function, earliest_changes_version)
62 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
63 ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
# Monkeypatch commands.getstatusoutput as it returns a "0" exit code in
# all situations under lenny's Python.
def dak_getstatusoutput(cmd):
    # Run `cmd` through the shell with stderr folded into stdout, as a
    # replacement for the broken commands.getstatusoutput.
    pipe = subprocess.Popen(cmd, shell=True, universal_newlines=True,
        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

    # Collect the child's combined output in one string.
    output = "".join(pipe.stdout.readlines())

# Install the replacement so all callers of commands.getstatusoutput
# in this process get correct exit codes.
commands.getstatusoutput = dak_getstatusoutput
81 ################################################################################
84 """ Escape html chars """
85 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
87 ################################################################################
def open_file(filename, mode='r'):
    """
    Open C{file}, return fileobject.

    @type filename: string
    @param filename: path/filename to open

    @type mode: string
    @param mode: open mode

    @rtype: fileobject
    @return: open fileobject

    @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
    """
        f = open(filename, mode)
        # Translate the low-level IOError into the project exception type.
        raise CantOpenError, filename
111 ################################################################################
def our_raw_input(prompt=""):
    # raw_input() replacement: prompt on stdout, report ^D on stderr.
    sys.stdout.write(prompt)
        # EOF (^D) from the user: tell them on stderr.
        sys.stderr.write("\nUser interrupt (^D).\n")
124 ################################################################################
def extract_component_from_section(section):
    """Split "component/section" into its parts; return (section, component),
    expanding the default component when none is given explicitly."""
    if section.find('/') != -1:
        component = section.split('/')[0]

    # Expand default component
        # A bare section name that is itself a configured component
        # (e.g. "non-free") names that component.
        if Cnf.has_key("Component::%s" % section):

    return (section, component)
141 ################################################################################
def parse_deb822(contents, signing_rules=0):
    """
    Parse an RFC822-ish (deb822) stanza into a dict of fields.

    signing_rules has the same semantics as in parse_changes():
    -1 = no signature required, 0 = signature required (default),
    1 = strict .dsc-style checking of the PGP armour.

    @return: dict mapping lower-cased field names to values; the raw
        input is also preserved under the "filecontents" key

    @raise ParseChangesError: on malformed input
    @raise InvalidDscError: on armour violations when signing_rules == 1
    """
    # Split the lines in the input, keeping the linebreaks.
    lines = contents.splitlines(True)
        raise ParseChangesError, "[Empty changes file]"

    # Reindex by line number so we can easily verify the format of
        indexed_lines[index] = line[:-1]

    num_of_lines = len(indexed_lines.keys())
    while index < num_of_lines:
        line = indexed_lines[index]
            # Blank line: under strict rules the signed data must end here
            # and be followed immediately by the signature armour.
            if signing_rules == 1:
                if index > num_of_lines:
                    raise InvalidDscError, index
                line = indexed_lines[index]
                if not line.startswith("-----BEGIN PGP SIGNATURE"):
                    raise InvalidDscError, index
        if line.startswith("-----BEGIN PGP SIGNATURE"):
        if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
            # Under strict rules, skip the PGP header block up to the
            # blank line that separates it from the signed data.
            if signing_rules == 1:
                while index < num_of_lines and line != "":
                    line = indexed_lines[index]
        # If we're not inside the signed data, don't process anything
        if signing_rules >= 0 and not inside_signature:
        slf = re_single_line_field.match(line)
            field = slf.groups()[0].lower()
            changes[field] = slf.groups()[1]
            # " ." denotes an empty continuation line of a multi-line field.
            changes[field] += '\n'
        mlf = re_multi_line_field.match(line)
            raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
            if first == 1 and changes[field] != "":
                changes[field] += '\n'
            changes[field] += mlf.groups()[0] + '\n'

    # Strict mode: running off the end while still inside the signature
    # armour means the file was truncated or malformed.
    if signing_rules == 1 and inside_signature:
        raise InvalidDscError, index

    changes["filecontents"] = "".join(lines)

    if changes.has_key("source"):
        # Strip the source version in brackets from the source field,
        # put it in the "source-version" field instead.
        srcver = re_srchasver.search(changes["source"])
            changes["source"] = srcver.group(1)
            changes["source-version"] = srcver.group(2)

        raise ParseChangesError, error
231 ################################################################################
def parse_changes(filename, signing_rules=0):
    """
    Parses a changes file and returns a dictionary where each field is a
    key.  The mandatory first argument is the filename of the .changes
    file.

    signing_rules is an optional argument:

      - If signing_rules == -1, no signature is required.
      - If signing_rules == 0 (the default), a signature is required.
      - If signing_rules == 1, it turns on the same strict format checking
        as dpkg-source.

    The rules for (signing_rules == 1)-mode are:

      - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
        followed by any PGP header data and must end with a blank line.

      - The data section must end with a blank line and must be followed by
        "-----BEGIN PGP SIGNATURE-----".
    """
    changes_in = open_file(filename)
    content = changes_in.read()
        # Validate the file is proper UTF-8 before handing it to the parser.
        unicode(content, 'utf-8')
        raise ChangesUnicodeError, "Changes file not proper utf-8"
    return parse_deb822(content, signing_rules)
264 ################################################################################
def hash_key(hashname):
    """Return the files-dict key under which the given hash is stored
    (e.g. "md5" -> "md5sum")."""
    return hashname + "sum"
269 ################################################################################
def create_hash(where, files, hashname, hashfunc):
    """
    create_hash extends the passed files dict with the given hash by
    iterating over all files on disk and passing them to the hashing
    function given.

    @return: list of rejection messages (empty on success)
    """
    for f in files.keys():
            # Files that cannot be opened are reported, not fatal.
            file_handle = open_file(f)
        except CantOpenError:
            rejmsg.append("Could not open file %s for checksumming" % (f))

        # Store the computed digest under e.g. "sha1sum".
        files[f][hash_key(hashname)] = hashfunc(file_handle)
def check_hash(where, files, hashname, hashfunc):
    """
    check_hash checks the given hash in the files dict against the actual
    files on disk.  The hash values need to be present consistently in
    all file entries.  It does not modify its input in any way.

    @return: list of rejection messages (empty on success)
    """
    for f in files.keys():
                file_handle = open_file(f)

                # Check for the hash entry, to not trigger a KeyError.
                if not files[f].has_key(hash_key(hashname)):
                    rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,

                # Actually check the hash for correctness.
                if hashfunc(file_handle) != files[f][hash_key(hashname)]:
                    rejmsg.append("%s: %s check failed in %s" % (f, hashname,
            except CantOpenError:
                # TODO: This happens when the file is in the pool.
                # warn("Cannot open file %s" % f)
def check_size(where, files):
    """
    check_size checks the file sizes in the passed files dict against the
    files on disk.

    @return: list of rejection messages (empty on success)
    """
    for f in files.keys():
                # TODO: This happens when the file is in the pool.

        # Compare the on-disk size with the size announced in the manifest.
        actual_size = entry[stat.ST_SIZE]
        size = int(files[f]["size"])
        if size != actual_size:
            rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
                   % (f, actual_size, size, where))
351 ################################################################################
def check_dsc_files(dsc_filename, dsc=None, dsc_files=None):
    """
    Verify that the files listed in the Files field of the .dsc are
    those expected given the announced Format.

    @type dsc_filename: string
    @param dsc_filename: path of .dsc file

    @type dsc: dict
    @param dsc: the content of the .dsc parsed by C{parse_changes()}

    @type dsc_files: dict
    @param dsc_files: the file list returned by C{build_file_list()}

    @rtype: list
    @return: all errors detected
    """
    # Parse the file if needed
        dsc = parse_changes(dsc_filename, signing_rules=1);

    if dsc_files is None:
        dsc_files = build_file_list(dsc, is_a_dsc=1)

    # Ensure .dsc lists proper set of source files according to the format
    # 'has' counts how many files of each type the .dsc lists.
    has = defaultdict(lambda: 0)
        # (filename-suffix regex, type keys to count in 'has')
        (r'orig.tar.gz',               ('orig_tar_gz', 'orig_tar')),
        (r'diff.gz',                   ('debian_diff',)),
        (r'tar.gz',                    ('native_tar_gz', 'native_tar')),
        (r'debian\.tar\.(gz|bz2)',     ('debian_tar',)),
        (r'orig\.tar\.(gz|bz2)',       ('orig_tar',)),
        (r'tar\.(gz|bz2)',             ('native_tar',)),
        (r'orig-.+\.tar\.(gz|bz2)',    ('more_orig_tar',)),

    for f in dsc_files.keys():
        m = re_issource.match(f)
            rejmsg.append("%s: %s in Files field not recognised as source."

        # Populate 'has' dictionary by resolving keys in lookup table
        for regex, keys in ftype_lookup:
            if re.match(regex, m.group(3)):

            # File does not match anything in lookup table; reject
            # NOTE(review): 'reject' is not defined in this module -- this
            # probably should be rejmsg.append(...) like the other errors.
            reject("%s: unexpected source file '%s'" % (dsc_filename, f))

    # Check for multiple files
    for file_type in ('orig_tar', 'native_tar', 'debian_tar', 'debian_diff'):
        if has[file_type] > 1:
            rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))

    # Source format specific tests
        format = get_format_from_string(dsc['format'])
            '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)

    except UnknownFormatError:
        # Not an error here for now
431 ################################################################################
def check_hash_fields(what, manifest):
    """
    check_hash_fields ensures that there are no checksum fields in the
    given dict that we do not know about.

    @return: list of rejection messages (empty on success)
    """
    # Names of the hashes we support (first element of each known_hashes entry).
    hashes = map(lambda x: x[0], known_hashes)
    for field in manifest:
        if field.startswith("checksums-"):
            hashname = field.split("-",1)[1]
            if hashname not in hashes:
                rejmsg.append("Unsupported checksum field for %s "\
                    "in %s" % (hashname, what))
449 ################################################################################
def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
    # Decide how to validate: changes Format versions >= 'version' must
    # carry the hash field themselves; older ones need it computed.
    if format >= version:
        # The version should contain the specified hash.

        # Import hashes from the changes
        rejmsg = parse_checksums(".changes", files, changes, hashname)

        # We need to calculate the hash because it can't possibly
    # 'func' is either check_hash or create_hash depending on the branch above.
    return func(".changes", files, hashname, hashfunc)
466 # We could add the orig which might be in the pool to the files dict to
467 # access the checksums easily.
def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
    """
    ensure_dsc_hashes' task is to ensure that each and every *present* hash
    in the dsc is correct, i.e. identical to the changes file and if necessary
    the pool.  The latter task is delegated to check_hash.

    @return: list of rejection messages (empty on success)
    """
    # A missing Checksums-<hash> field is fine: only *present* hashes
    # are validated.
    if not dsc.has_key('Checksums-%s' % (hashname,)):
    # Import hashes from the dsc
    parse_checksums(".dsc", dsc_files, dsc, hashname)
    # ... then verify them against the files on disk.
    rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
485 ################################################################################
def parse_checksums(where, files, manifest, hashname):
    """Import the Checksums-<hashname> entries of 'manifest' into the
    files dict, verifying the sizes match; return rejection messages."""
    field = 'checksums-%s' % hashname
    if not field in manifest:
    for line in manifest[field].split('\n'):
        # Each entry is "<checksum> <size> <filename>".
        clist = line.strip().split(' ')
            checksum, size, checkfile = clist
            rejmsg.append("Cannot parse checksum line [%s]" % (line))
        if not files.has_key(checkfile):
            # TODO: check for the file's entry in the original files dict, not
            # the one modified by (auto)byhand and other weird stuff
            #             rejmsg.append("%s: not present in files but in checksums-%s in %s" %
            #                 (file, hashname, where))
        # Both sides are strings here (split token vs. build_file_list value).
        if not files[checkfile]["size"] == size:
            rejmsg.append("%s: size differs for files and checksums-%s entry "\
                "in %s" % (checkfile, hashname, where))
        files[checkfile][hash_key(hashname)] = checksum
    for f in files.keys():
        if not files[f].has_key(hash_key(hashname)):
            # NOTE(review): this reports 'checkfile' (the last line parsed
            # above) instead of the loop variable 'f' -- probably a bug.
            rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
518 ################################################################################
520 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
522 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
525 # Make sure we have a Files: field to parse...
526 if not changes.has_key(field):
527 raise NoFilesFieldError
529 # Get SourceFormat object for this Format and validate it
530 format = get_format_from_string(changes.get['format'])
531 format.validate_format(is_a_dsc=is_a_dsc, field=field)
533 includes_section = (not is_a_dsc) and field == "files"
535 # Parse each entry/line:
536 for i in changes[field].split('\n'):
540 section = priority = ""
543 (md5, size, section, priority, name) = s
545 (md5, size, name) = s
547 raise ParseChangesError, i
554 (section, component) = extract_component_from_section(section)
556 files[name] = Dict(size=size, section=section,
557 priority=priority, component=component)
558 files[name][hashname] = md5
562 ################################################################################
def send_mail (message, filename=""):
    """sendmail wrapper, takes _either_ a message string or a file as arguments"""

    # If we've been passed a string dump it into a temporary file
        (fd, filename) = tempfile.mkstemp()
        os.write (fd, message)

    # Optionally filter the recipients through the configured whitelist.
    if Cnf.has_key("Dinstall::MailWhiteList") and \
           Cnf["Dinstall::MailWhiteList"] != "":
        message_in = open_file(filename)
        message_raw = modemail.message_from_file(message_in)

        whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
            # Each whitelist line is either a literal address or, when
            # carrying the RE marker, a regular expression.
            for line in whitelist_in:
                if not re_whitespace_comment.match(line):
                    if re_re_mark.match(line):
                        whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
                        whitelist.append(re.compile(re.escape(line.strip())))

        # Fields to check.
        fields = ["To", "Bcc", "Cc"]
            # Check each recipient of this header against the whitelist.
            value = message_raw.get(field, None)
                for item in value.split(","):
                    (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
                    if not mail_whitelisted:
                        print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])

                # Doesn't have any mail in whitelist so remove the header
                    del message_raw[field]
                    message_raw.replace_header(field, string.join(match, ", "))

        # Change message fields in order if we don't have a To header
        if not message_raw.has_key("To"):
                if message_raw.has_key(field):
                    message_raw[fields[-1]] = message_raw[field]
                    del message_raw[field]
                # Clean up any temporary files
                # and return, as we removed all recipients.
                    os.unlink (filename);

        # Rewrite the (temporary) file with the filtered message.
        # NOTE(review): O_EXCL without O_CREAT has no defined effect when
        # opening an existing file -- confirm the intended flags.
        fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
        os.write (fd, message_raw.as_string(True));

    # Invoke sendmail on the (possibly rewritten) file.
    (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
        raise SendmailFailedError, output

    # Clean up any temporary files
644 ################################################################################
def poolify (source, component):
    """Return the pool sub-path for a source package: lib* packages are
    pooled under their four-character prefix (libfoo -> libf/), all
    others under their first letter."""
    if source[:3] == "lib":
        prefix = source[:4]
    else:
        prefix = source[:1]
    return component + prefix + '/' + source + '/'
654 ################################################################################
def move (src, dest, overwrite = 0, perms = 0664):
    """Move 'src' to 'dest' (copy + chmod), creating the destination
    directory if needed; refuses to overwrite unless 'overwrite' is set."""
    if os.path.exists(dest) and os.path.isdir(dest):
        # 'dest' is a plain file path: the directory part must exist.
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        # Create group-writable, setgid archive directories.
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)

    #print "Moving %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
            fubar("Can't move %s to %s - file already exists." % (src, dest))
            if not os.access(dest, os.W_OK):
                fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
def copy (src, dest, overwrite = 0, perms = 0664):
    """Copy 'src' to 'dest' (copy2 + chmod), creating the destination
    directory if needed; raises instead of calling fubar() like move()."""
    if os.path.exists(dest) and os.path.isdir(dest):
        # 'dest' is a plain file path: the directory part must exist.
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        # Create group-writable, setgid archive directories.
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)

    #print "Copying %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
            raise FileExistsError
            if not os.access(dest, os.W_OK):
                raise CantOverwriteError
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
701 ################################################################################
704 res = socket.gethostbyaddr(socket.gethostname())
705 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
706 if database_hostname:
707 return database_hostname
def which_conf_file ():
    """Return the dak config file path for this host, honouring a
    per-host override and (optionally) a per-user local config."""
    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # NOTE(review): os.path.join discards 'homedir' because the second
        # component is absolute, so this is always "/etc/dak.conf".
        # Probably meant os.path.join(homedir, "etc/dak.conf") -- confirm.
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            # NOTE(review): reads default_config, not confpath -- verify
            # this is intentional.
            apt_pkg.ReadConfigFileISC(Cnf,default_config)

    # We are still in here, so there is no local config file or we do
    # not allow local files. Do the normal stuff.
    if Cnf.get("Config::" + res[0] + "::DakConfig"):
        return Cnf["Config::" + res[0] + "::DakConfig"]
    return default_config
def which_apt_conf_file ():
    """Return the apt config file path for this host, honouring a
    per-host override and (optionally) a per-user local config."""
    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # NOTE(review): os.path.join discards 'homedir' because the second
        # component is absolute, so this is always "/etc/dak.conf" -- and
        # the call below reads default_config, not confpath. Confirm intent.
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            apt_pkg.ReadConfigFileISC(Cnf,default_config)

    if Cnf.get("Config::" + res[0] + "::AptConfig"):
        return Cnf["Config::" + res[0] + "::AptConfig"]
    return default_apt_config
def which_alias_file():
    """Return this host's forward-alias file path, if it exists."""
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
    if os.path.exists(aliasfn):
749 ################################################################################
def TemplateSubst(map, filename):
    """ Perform a substitution of template: replace every occurrence of
    each key of 'map' in the template file with its stringified value. """
    templatefile = open_file(filename)
    template = templatefile.read()
        template = template.replace(x, str(map[x]))
760 ################################################################################
762 def fubar(msg, exit_code=1):
763 sys.stderr.write("E: %s\n" % (msg))
767 sys.stderr.write("W: %s\n" % (msg))
769 ################################################################################
771 # Returns the user name with a laughable attempt at rfc822 conformancy
772 # (read: removing stray periods).
774 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
777 return pwd.getpwuid(os.getuid())[0]
779 ################################################################################
789 return ("%d%s" % (c, t))
791 ################################################################################
def cc_fix_changes (changes):
    """Rework the Architecture field of a parsed changes dict from a
    space-separated string into a dict keyed by architecture name."""
    o = changes.get("architecture", "")
        del changes["architecture"]
    changes["architecture"] = {}
        changes["architecture"][j] = 1
def changes_compare (a, b):
    """ Sort by source name, source version, 'have source', and then by filename """
        # Unparsable .changes files sort first/last rather than crashing.
        a_changes = parse_changes(a)
        b_changes = parse_changes(b)

    cc_fix_changes (a_changes)
    cc_fix_changes (b_changes)

    # Sort by source name
    a_source = a_changes.get("source")
    b_source = b_changes.get("source")
    q = cmp (a_source, b_source)

    # Sort by source version
    a_version = a_changes.get("version", "0")
    b_version = b_changes.get("version", "0")
    q = apt_pkg.VersionCompare(a_version, b_version)

    # Sort by 'have source'
    a_has_source = a_changes["architecture"].get("source")
    b_has_source = b_changes["architecture"].get("source")
    if a_has_source and not b_has_source:
    elif b_has_source and not a_has_source:

    # Fall back to sort by filename
841 ################################################################################
def find_next_free (dest, too_many=100):
    """Return 'dest', or 'dest.N' for the first N that does not exist yet;
    raise NoFreeFilenameError after 'too_many' attempts."""
    while os.path.exists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
    if extra >= too_many:
        raise NoFreeFilenameError
853 ################################################################################
def result_join (original, sep = '\t'):
    """
    Join the elements of 'original' with 'sep', rendering None elements
    as the empty string (useful when formatting query results).

    @param original: sequence of strings and/or None placeholders
    @param sep: separator string (default: TAB)
    @rtype: string
    """
    # Iterate directly instead of indexing, and use 'is None' rather
    # than the old '== None' comparison.
    resultlist = []
    for item in original:
        if item is None:
            resultlist.append("")
        else:
            resultlist.append(item)
    return sep.join(resultlist)
864 ################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    """
    Prefix each stripped line of 'str' with 'prefix'.  Blank lines are
    dropped unless include_blank_lines is set.  The result carries no
    trailing newline.

    (The parameter name 'str' shadows the builtin but is kept for
    interface compatibility.)
    """
    # Collect the prefixed lines and join once, instead of building the
    # string with repeated "+=" and stripping a trailing newline.
    out = []
    for line in str.split('\n'):
        line = line.strip()
        if line or include_blank_lines:
            out.append("%s%s" % (prefix, line))
    return "\n".join(out)
877 ################################################################################
def validate_changes_file_arg(filename, require_changes=1):
    """
    'filename' is either a .changes or .dak file.  If 'filename' is a
    .dak file, it's changed to be the corresponding .changes file.  The
    function then checks if the .changes file a) exists and b) is
    readable and returns the .changes filename if so.  If there's a
    problem, the next action depends on the option 'require_changes'
    argument:

      - If 'require_changes' == -1, errors are ignored and the .changes
        filename is returned.
      - If 'require_changes' == 0, a warning is given and 'None' is returned.
      - If 'require_changes' == 1, a fatal error is raised.

    """
    error = None

    orig_filename = filename
    if filename.endswith(".dak"):
        filename = filename[:-4]+".changes"

    if not filename.endswith(".changes"):
        error = "invalid file type; not a changes file"
        if not os.access(filename,os.R_OK):
            if os.path.exists(filename):
                error = "permission denied"
                error = "file not found"

        if require_changes == 1:
            fubar("%s: %s." % (orig_filename, error))
        elif require_changes == 0:
            warn("Skipping %s - %s" % (orig_filename, error))
        else: # We only care about the .dak file
920 ################################################################################
923 return (arch != "source" and arch != "all")
925 ################################################################################
def join_with_commas_and(list):
    """Format a list as English prose: "a, b and c"; "nothing" if empty."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    all_but_last = ", ".join(list[:-1])
    return "%s and %s" % (all_but_last, list[-1])
932 ################################################################################
937 (pkg, version, constraint) = atom
939 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
942 pp_deps.append(pp_dep)
943 return " |".join(pp_deps)
945 ################################################################################
950 ################################################################################
def parse_args(Options):
    """ Handle -a, -c and -s arguments; returns them as SQL constraints """
    # XXX: This should go away and everything which calls it be converted
    #      to use SQLA properly.  For now, we'll just fix it not to use
    #      the old Pg interface though
    session = DBConn().session()
        # Process suite: resolve each named suite to its database id.
        for suitename in split_args(Options["Suite"]):
            suite = get_suite(suitename, session=session)
            if suite.suite_id is None:
                warn("suite '%s' not recognised." % (suite.suite_name))
                suite_ids_list.append(suite.suite_id)
            con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
            fubar("No valid suite given.")

    # Process component: resolve each named component to its database id.
    if Options["Component"]:
        component_ids_list = []
        for componentname in split_args(Options["Component"]):
            component = get_component(componentname, session=session)
            if component is None:
                warn("component '%s' not recognised." % (componentname))
                component_ids_list.append(component.component_id)
        if component_ids_list:
            con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
            fubar("No valid component given.")

    # Process architecture
    con_architectures = ""
    if Options["Architecture"]:
        for archname in split_args(Options["Architecture"]):
            # "source" is handled via the check_source flag, not an arch id.
            if archname == "source":
                arch = get_architecture(archname, session=session)
                    warn("architecture '%s' not recognised." % (archname))
                    arch_ids_list.append(arch.arch_id)
            con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
            if not check_source:
                fubar("No valid architecture given.")

    return (con_suites, con_architectures, con_components, check_source)
1014 ################################################################################
1016 # Inspired(tm) by Bryn Keller's print_exc_plus (See
1017 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
1020 tb = sys.exc_info()[2]
1027 frame = frame.f_back
1029 traceback.print_exc()
1031 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
1032 frame.f_code.co_filename,
1034 for key, value in frame.f_locals.items():
1035 print "\t%20s = " % key,
1039 print "<unable to print>"
1041 ################################################################################
1043 def try_with_debug(function):
1051 ################################################################################
1053 def arch_compare_sw (a, b):
1055 Function for use in sorting lists of architectures.
1057 Sorts normally except that 'source' dominates all others.
1060 if a == "source" and b == "source":
1069 ################################################################################
def split_args (s, dwim=1):
    """
    Split command line arguments which can be separated by either commas
    or whitespace.  If dwim is set, it will complain about string ending
    in comma since this usually means someone did 'dak ls -a i386, m68k
    foo' or something and the inevitable confusion resulting from 'm68k'
    being treated as an argument is undesirable.
    """
    # No comma at all: treat it as whitespace-separated.
    if s.find(",") == -1:
        if s[-1:] == "," and dwim:
            fubar("split_args: found trailing comma, spurious space maybe?")
1087 ################################################################################
def Dict(**kwargs):
    """Legacy shorthand: return the keyword arguments as a plain dict."""
    # Catch-all renamed from '**dict' so the builtin is not shadowed;
    # callers only ever pass keywords, so the interface is unchanged.
    return kwargs
1091 ########################################
def gpgv_get_status_output(cmd, status_read, status_write):
    """
    Our very own version of commands.getstatusoutput(), hacked to support
    gpgv's status fd: the child keeps 'status_write' open so gpgv can
    report machine-readable results on it.

    @return: (output, status, exit_status) -- combined stdout/stderr,
        the raw status-fd text, and the child's waitpid status
    """
    cmd = ['/bin/sh', '-c', cmd]
    p2cread, p2cwrite = os.pipe()
    c2pread, c2pwrite = os.pipe()
    errout, errin = os.pipe()
        # Child: close every fd except the status fd before exec'ing,
        # so only status_write leaks through to gpgv.
        for i in range(3, 256):
            if i != status_write:
            os.execvp(cmd[0], cmd)

    # Parent side of the pipes.
    os.dup2(c2pread, c2pwrite)
    os.dup2(errout, errin)

    output = status = ""
        # Multiplex the child's stdout/stderr and the status fd.
        i, o, e = select.select([c2pwrite, errin, status_read], [], [])
            r = os.read(fd, 8196)
                more_data.append(fd)
                if fd == c2pwrite or fd == errin:
                elif fd == status_read:
                    fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
            # All streams hit EOF: reap the child and close our fds.
            pid, exit_status = os.waitpid(pid, 0)
                os.close(status_write)
                os.close(status_read)

    return output, status, exit_status
1158 ################################################################################
def process_gpgv_output(status):
    """Parse gpgv status-fd text into ({keyword: args}, internal_error);
    internal_error is non-empty if the output was malformed."""
    # Process the status-fd output
    for line in status.split('\n'):
        # Each status line looks like "[GNUPG:] KEYWORD args...".
        split = line.split()
            internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
        # A few keywords may legitimately repeat; any other duplicate is
        # treated as an internal error.
        if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
            keywords[keyword] = args

    return (keywords, internal_error)
1185 ################################################################################
def retrieve_key (filename, keyserver=None, keyring=None):
    """
    Retrieve the key that signed 'filename' from 'keyserver' and
    add it to 'keyring'.  Returns nothing on success, or an error message
    on failure.
    """

    # Defaults for keyserver and keyring
        keyserver = Cnf["Dinstall::KeyServer"]
        keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(filename):
        return "%s: tainted filename" % (filename)

    # Invoke gpgv on the file
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
    (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)
        return internal_error

    # The whole point: gpgv against an empty keyring must report the
    # missing key so we learn which fingerprint to fetch.
    if not keywords.has_key("NO_PUBKEY"):
        return "didn't find expected NO_PUBKEY in gpgv status-fd output"

    fingerprint = keywords["NO_PUBKEY"][0]
    # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
    # it'll try to create a lockfile in /dev. A better solution might
    # be a tempfile or something.
    cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
          % (Cnf["Dinstall::SigningKeyring"])
    cmd += " --keyring %s --keyserver %s --recv-key %s" \
           % (keyring, keyserver, fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
        return "'%s' failed with exit code %s" % (cmd, result)
1231 ################################################################################
def gpg_keyring_args(keyrings=None):
    """Build the gpg "--keyring ..." argument string for the given
    keyrings (defaults to the configured Dinstall::GPGKeyring list)."""
    if not keyrings:
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
    args = []
    for keyring in keyrings:
        args.append("--keyring %s" % keyring)
    return " ".join(args)
1239 ################################################################################
def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None):
    """
    Check the signature of a file and return the fingerprint if the
    signature is valid or 'None' if it's not.  The first argument is the
    filename whose signature should be checked.  The second argument is a
    reject function and is called when an error is found.  The reject()
    function must allow for two arguments: the first is the error message,
    the second is an optional prefix string.  It's possible for reject()
    to be called more than once during an invocation of check_signature().
    The third argument is optional and is the name of the files the
    detached signature applies to.  The fourth argument is optional and is
    a *list* of keyrings to use.  'autofetch' can either be None, True or
    False.  If None, the default behaviour specified in the config will be
    used.

    Returns a tuple (fingerprint-or-None, list-of-reject-messages).
    """

    rejects = []
    fingerprint = None

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(sig_filename):
        rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
        return (None, rejects)

    if data_filename and not re_taint_free.match(data_filename):
        rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
        return (None, rejects)

    if not keyrings:
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    # Autofetch the signing key if that's enabled
    if autofetch is None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    if autofetch:
        error_msg = retrieve_key(sig_filename)
        if error_msg:
            rejects.append(error_msg)
            return (None, rejects)

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    if internal_error:
        rejects.append("internal error while performing signature check on %s." % (sig_filename))
        # BUGFIX: these used to be two-argument calls left over from the old
        # reject(msg, prefix) callback API; list.append() takes exactly one
        # argument, so they raised TypeError instead of recording the error.
        rejects.append(internal_error)
        rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.")
        return (None, rejects)

    # Now check for obviously bad things in the processed output
    if "KEYREVOKED" in keywords:
        rejects.append("The key used to sign %s has been revoked." % (sig_filename))
    if "BADSIG" in keywords:
        rejects.append("bad signature on %s." % (sig_filename))
    if "ERRSIG" in keywords and "NO_PUBKEY" not in keywords:
        rejects.append("failed to check signature on %s." % (sig_filename))
    if "NO_PUBKEY" in keywords:
        args = keywords["NO_PUBKEY"]
        key = ""
        if len(args) >= 1:
            key = args[0]
        rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if "BADARMOR" in keywords:
        rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if "NODATA" in keywords:
        rejects.append("no signature found in %s." % (sig_filename))
    if "EXPKEYSIG" in keywords:
        args = keywords["EXPKEYSIG"]
        key = ""
        if len(args) >= 1:
            key = args[0]
        rejects.append("Signature made by expired key 0x%s" % (key))
    if "KEYEXPIRED" in keywords and "GOODSIG" not in keywords:
        args = keywords["KEYEXPIRED"]
        expiredate = ""
        if len(args) >= 1:
            timestamp = args[0]
            # gpgv emits either an epoch timestamp or an ISO 8601 date
            # (containing a 'T'); only the former needs converting.
            if timestamp.count("T") == 0:
                try:
                    expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
                except ValueError:
                    expiredate = "unknown (%s)" % (timestamp)
            else:
                expiredate = timestamp
        rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))

    if len(rejects) > 0:
        return (None, rejects)

    # Next check gpgv exited with a zero return code
    if exit_status:
        rejects.append("gpgv failed while checking %s." % (sig_filename))
        # Prefer the machine-readable status-fd output; fall back to stderr.
        if status.strip():
            rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "))
        else:
            rejects.append(prefix_multi_line_string(output, " [GPG output:] "))
        return (None, rejects)

    # Sanity check the good stuff we expect
    if "VALIDSIG" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
    else:
        args = keywords["VALIDSIG"]
        if len(args) < 1:
            rejects.append("internal error while checking signature on %s." % (sig_filename))
        else:
            fingerprint = args[0]
    if "GOODSIG" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if "SIG_ID" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

    # Finally ensure there's not something we don't recognise
    known_keywords = frozenset(("VALIDSIG", "SIG_ID", "GOODSIG", "BADSIG", "ERRSIG",
                                "SIGEXPIRED", "KEYREVOKED", "NO_PUBKEY", "BADARMOR",
                                "NODATA", "NOTATION_DATA", "NOTATION_NAME", "KEYEXPIRED"))

    for keyword in keywords.keys():
        if keyword not in known_keywords:
            rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))

    if len(rejects) > 0:
        return (None, rejects)

    return (fingerprint, [])
1373 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """retrieve email addresses from gpg key uids for a given fingerprint"""
    # Serve from the module-level cache when we have seen this key before.
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses is not None:
        return addresses
    addresses = set()
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
          % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    # Only parse output if gpg succeeded; otherwise cache an empty set.
    if result == 0:
        for l in output.split('\n'):
            m = re_gpg_uid.match(l)
            if m:
                addresses.add(m.group(1))
    key_uid_email_cache[fingerprint] = addresses
    return addresses
1392 ################################################################################
1394 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """
    Word-wrap 'paragraph' so that no line exceeds 'max_length' characters.
    Continuation lines (every line after the first) are prefixed with
    'prefix'.  Words longer than 'max_length' are placed on a line of
    their own rather than being broken.

    BUGFIX: the old implementation flushed the pending line when it hit an
    oversized word but never cleared the line buffer, so the following
    words could be emitted twice.
    """
    lines = []
    current = ""

    for word in paragraph.split():
        if len(word) > max_length:
            # Oversized word: flush whatever is pending, then give the
            # word its own line.
            if current:
                lines.append(current)
                current = ""
            lines.append(word)
        elif not current:
            current = word
        elif len(current) + len(word) + 1 > max_length:
            # Word doesn't fit on the current line; start a new one.
            lines.append(current)
            current = word
        else:
            current += ' ' + word

    if current:
        lines.append(current)

    return ('\n' + prefix).join(lines)
1425 ################################################################################
def clean_symlink(src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
    Returns the fixed 'src' path.
    """
    # Strip the leading root from both endpoints, then climb out of the
    # destination's directory one '../' per path component.
    rel_src = src.replace(root, '', 1)
    rel_dest_dir = os.path.dirname(dest.replace(root, '', 1))
    hops = len(rel_dest_dir.split('/'))
    return '../' * hops + rel_src
1438 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.
    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pair (fd, name).
    """
    # Delegate to mkstemp; spelled with keyword arguments for clarity.
    return tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=directory)
1452 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Return a secure and unique directory by pre-creating it.
    If 'parent' is non-null, it will be the directory the directory is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pathname to the new directory.
    """
    # Delegate to mkdtemp; spelled with keyword arguments for clarity.
    return tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=parent)
1466 ################################################################################
def is_email_alias(email):
    """ checks if the user part of the email is listed in the alias file """
    global alias_cache
    # Lazily populate the module-level cache on first use.
    if alias_cache is None:
        aliasfn = which_alias_file()
        alias_cache = set()
        if aliasfn:
            # FIX: close the alias file when done instead of leaking the
            # open file handle for the life of the process.
            f = open(aliasfn)
            try:
                for l in f:
                    alias_cache.add(l.split(':')[0])
            finally:
                f.close()
    uid = email.split('@')[0]
    return uid in alias_cache
1480 ################################################################################
def get_changes_files(dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place)

    Returns a list of filenames
    """
    try:
        # Much of the rest of p-u/p-a depends on being in the right place
        os.chdir(dir)
        changes_files = [entry for entry in os.listdir(dir)
                         if entry.endswith('.changes')]
    except OSError as e:
        fubar("Failed to read list from directory %s (%s)" % (dir, e))

    return changes_files
1499 ################################################################################
# Module-level initialisation: build the global apt configuration object and
# load the dak defaults from default_config ("/etc/dak/dak.conf").  This runs
# at import time so every consumer of this module sees a populated Cnf.
Cnf = apt_pkg.newConfiguration()
apt_pkg.ReadConfigFileISC(Cnf,default_config)

# If a host-specific config file is in use (per which_conf_file), layer it on
# top of the defaults just read.
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1509 ###############################################################################