2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
40 import email as modemail
43 from dbconn import DBConn, get_architecture, get_component, get_suite
44 from dak_exceptions import *
45 from textutils import fix_maintainer
46 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
47 re_multi_line_field, re_srchasver, re_taint_free, \
48 re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource
50 from formats import parse_format, validate_changes_format
51 from srcformats import get_format_from_string
52 from collections import defaultdict
54 ################################################################################
# Site-wide configuration file locations.
default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used

alias_cache = None #: Cache for email alias checks
key_uid_email_cache = {} #: Cache for email addresses from gpg key uids

# (hashname, function, earliest_changes_version)
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
                ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
66 # Monkeypatch commands.getstatusoutput as it returns a "0" exit code in
67 # all situations under lenny's Python.
def dak_getstatusoutput(cmd):
    # Run `cmd` through the shell with stderr folded into stdout, mirroring
    # the commands.getstatusoutput() contract (which is broken under
    # lenny's Python -- see the comment above).
    pipe = subprocess.Popen(cmd, shell=True, universal_newlines=True,
        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

    output = "".join(pipe.stdout.readlines())

# Monkeypatch the buggy stdlib implementation: all later calls to
# commands.getstatusoutput() in this process use the wrapper above.
commands.getstatusoutput = dak_getstatusoutput
82 ################################################################################
85 """ Escape html chars """
86 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
88 ################################################################################
def open_file(filename, mode='r'):
    """
    Open C{filename}, return fileobject.

    @type filename: string
    @param filename: path/filename to open

    @type mode: string
    @param mode: open mode

    @rtype: fileobject
    @return: open fileobject

    @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.

    """
        # (try/except IOError wrapper elided in this view)
        f = open(filename, mode)
        raise CantOpenError, filename
112 ################################################################################
def our_raw_input(prompt=""):
    # raw_input() wrapper: write `prompt` to stdout explicitly and treat
    # EOF (^D) as a user interrupt on stderr.
        sys.stdout.write(prompt)
        sys.stderr.write("\nUser interrupt (^D).\n")
125 ################################################################################
def extract_component_from_section(section):
    # Split a Section field like "contrib/net" into (section, component);
    # sections without a "/" fall back to a default component.
    if section.find('/') != -1:
        component = section.split('/')[0]

    # Expand default component
        # A bare section that is itself a known component name maps to it.
        if Cnf.has_key("Component::%s" % section):

    return (section, component)
142 ################################################################################
def parse_deb822(contents, signing_rules=0):
    """Parse RFC822-ish control data (.changes/.dsc body) into a dict.

    Keys are lower-cased field names; multi-line fields keep embedded
    newlines.  `signing_rules` follows the parse_changes() convention
    (-1: no signature needed, 0: signature expected, 1: strict
    .dsc-style checking of the PGP framing).

    @raise ParseChangesError: on malformed input
    @raise InvalidDscError: on broken PGP framing in strict mode
    """
    # Split the lines in the input, keeping the linebreaks.
    lines = contents.splitlines(True)

        raise ParseChangesError, "[Empty changes file]"

    # Reindex by line number so we can easily verify the format of
        indexed_lines[index] = line[:-1]

    num_of_lines = len(indexed_lines.keys())
    while index < num_of_lines:
        line = indexed_lines[index]
        # Strict mode: a blank line must be immediately followed by the
        # PGP signature block.
        if signing_rules == 1:
            if index > num_of_lines:
                raise InvalidDscError, index
            line = indexed_lines[index]
            if not line.startswith("-----BEGIN PGP SIGNATURE"):
                raise InvalidDscError, index
        if line.startswith("-----BEGIN PGP SIGNATURE"):
        if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
            # Strict mode: skip the PGP header data up to the blank line.
            if signing_rules == 1:
                while index < num_of_lines and line != "":
                    line = indexed_lines[index]
        # If we're not inside the signed data, don't process anything
        if signing_rules >= 0 and not inside_signature:
        # Single-line field: "Field: value"
        slf = re_single_line_field.match(line)
            field = slf.groups()[0].lower()
            changes[field] = slf.groups()[1]
            changes[field] += '\n'
        # Continuation line of a multi-line field
        mlf = re_multi_line_field.match(line)
                raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
            if first == 1 and changes[field] != "":
                changes[field] += '\n'
            changes[field] += mlf.groups()[0] + '\n'

    if signing_rules == 1 and inside_signature:
        raise InvalidDscError, index

    # Keep the raw input around for callers that need it verbatim.
    changes["filecontents"] = "".join(lines)

    if changes.has_key("source"):
        # Strip the source version in brackets from the source field,
        # put it in the "source-version" field instead.
        srcver = re_srchasver.search(changes["source"])
            changes["source"] = srcver.group(1)
            changes["source-version"] = srcver.group(2)

        raise ParseChangesError, error
232 ################################################################################
def parse_changes(filename, signing_rules=0):
    """
    Parses a changes file and returns a dictionary where each field is a
    key.  The mandatory first argument is the filename of the .changes
    file.

    signing_rules is an optional argument:

      - If signing_rules == -1, no signature is required.
      - If signing_rules == 0 (the default), a signature is required.
      - If signing_rules == 1, it turns on the same strict format checking
        as dpkg-source.

    The rules for (signing_rules == 1)-mode are:

      - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
        followed by any PGP header data and must end with a blank line.

      - The data section must end with a blank line and must be followed by
        "-----BEGIN PGP SIGNATURE-----".
    """

    changes_in = open_file(filename)
    content = changes_in.read()
    # Reject files that are not valid UTF-8 before parsing.
        unicode(content, 'utf-8')
        raise ChangesUnicodeError, "Changes file not proper utf-8"
    return parse_deb822(content, signing_rules)
265 ################################################################################
def hash_key(hashname):
    """Return the files-dict key a hash is stored under.

    E.g. "md5" -> "md5sum", "sha1" -> "sha1sum".
    """
    return hashname + 'sum'
270 ################################################################################
def create_hash(where, files, hashname, hashfunc):
    """
    create_hash extends the passed files dict with the given hash by
    iterating over all files on disk and passing them to the hashing
    function given.

    @return: list of error messages (empty on success)
    """
    for f in files.keys():
            file_handle = open_file(f)
        except CantOpenError:
            rejmsg.append("Could not open file %s for checksumming" % (f))

        # Store the computed digest under e.g. "sha1sum".
        files[f][hash_key(hashname)] = hashfunc(file_handle)
def check_hash(where, files, hashname, hashfunc):
    """
    check_hash checks the given hash in the files dict against the actual
    files on disk.  The hash values need to be present consistently in
    all file entries.  It does not modify its input in any way.

    @return: list of error messages
    """
    for f in files.keys():
            file_handle = open_file(f)

            # Check for the hash entry, to not trigger a KeyError.
            if not files[f].has_key(hash_key(hashname)):
                rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,

            # Actually check the hash for correctness.
            if hashfunc(file_handle) != files[f][hash_key(hashname)]:
                rejmsg.append("%s: %s check failed in %s" % (f, hashname,
        except CantOpenError:
            # TODO: This happens when the file is in the pool.
            # warn("Cannot open file %s" % f)
def check_size(where, files):
    """
    check_size checks the file sizes in the passed files dict against the
    files on disk.

    @return: list of error messages
    """
    for f in files.keys():
            # TODO: This happens when the file is in the pool.

        actual_size = entry[stat.ST_SIZE]
        size = int(files[f]["size"])
        if size != actual_size:
            rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
                          % (f, actual_size, size, where))
352 ################################################################################
def check_dsc_files(dsc_filename, dsc=None, dsc_files=None):
    """
    Verify that the files listed in the Files field of the .dsc are
    those expected given the announced Format.

    @type dsc_filename: string
    @param dsc_filename: path of .dsc file

    @type dsc: dict
    @param dsc: the content of the .dsc parsed by C{parse_changes()}

    @type dsc_files: dict
    @param dsc_files: the file list returned by C{build_file_list()}

    @rtype: list
    @return: all errors detected
    """
    # Parse the file if needed
        dsc = parse_changes(dsc_filename, signing_rules=1);

    if dsc_files is None:
        dsc_files = build_file_list(dsc, is_a_dsc=1)

    # Ensure .dsc lists proper set of source files according to the format
    # `has` counts how many of each source-file kind were seen.
    has = defaultdict(lambda: 0)

    # (filename-suffix regex, capability keys it marks as present)
        (r'orig.tar.gz', ('orig_tar_gz', 'orig_tar')),
        (r'diff.gz', ('debian_diff',)),
        (r'tar.gz', ('native_tar_gz', 'native_tar')),
        (r'debian\.tar\.(gz|bz2)', ('debian_tar',)),
        (r'orig\.tar\.(gz|bz2)', ('orig_tar',)),
        (r'tar\.(gz|bz2)', ('native_tar',)),
        (r'orig-.+\.tar\.(gz|bz2)', ('more_orig_tar',)),

    for f in dsc_files.keys():
        m = re_issource.match(f)
            rejmsg.append("%s: %s in Files field not recognised as source."

        # Populate 'has' dictionary by resolving keys in lookup table
        for regex, keys in ftype_lookup:
            if re.match(regex, m.group(3)):

        # File does not match anything in lookup table; reject
            # NOTE(review): `reject` is not defined anywhere visible in this
            # module -- this line would raise NameError when hit; it probably
            # should be rejmsg.append(...).  Confirm before changing.
            reject("%s: unexpected source file '%s'" % (dsc_filename, f))

    # Check for multiple files
    for file_type in ('orig_tar', 'native_tar', 'debian_tar', 'debian_diff'):
        if has[file_type] > 1:
            rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))

    # Source format specific tests
        format = get_format_from_string(dsc['format'])
            '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)
    except UnknownFormatError:
        # Not an error here for now
def check_hash_fields(what, manifest):
    """
    check_hash_fields ensures that there are no checksum fields in the
    given dict that we do not know about.

    @return: list of error messages
    """
    # Names of the hashes we support, e.g. ["sha1", "sha256"].
    hashes = map(lambda x: x[0], known_hashes)
    for field in manifest:
        if field.startswith("checksums-"):
            hashname = field.split("-",1)[1]
            if hashname not in hashes:
                rejmsg.append("Unsupported checksum field for %s "\
                    "in %s" % (hashname, what))
450 ################################################################################
def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
    # Ensure `hashname` is present and correct for every file listed in the
    # .changes: import it from the control data when the format version
    # carries it, otherwise compute it from the files on disk.
    if format >= version:
        # The version should contain the specified hash.

        # Import hashes from the changes
        rejmsg = parse_checksums(".changes", files, changes, hashname)

        # We need to calculate the hash because it can't possibly
    # `func` is check_hash or create_hash depending on the branch above.
    return func(".changes", files, hashname, hashfunc)
467 # We could add the orig which might be in the pool to the files dict to
468 # access the checksums easily.
def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
    """
    ensure_dsc_hashes' task is to ensure that each and every *present* hash
    in the dsc is correct, i.e. identical to the changes file and if necessary
    the pool.  The latter task is delegated to check_hash.

    @return: list of error messages
    """
    # Nothing to do if the .dsc does not carry this checksum field at all.
    if not dsc.has_key('Checksums-%s' % (hashname,)):
    # Import hashes from the dsc
    parse_checksums(".dsc", dsc_files, dsc, hashname)
    rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
486 ################################################################################
def parse_checksums(where, files, manifest, hashname):
    # Copy the Checksums-<hashname> entries of `manifest` into `files`,
    # cross-checking the declared sizes; returns a list of reject messages.
    field = 'checksums-%s' % hashname
    if not field in manifest:
    for line in manifest[field].split('\n'):
        # Each line is "<checksum> <size> <filename>".
        clist = line.strip().split(' ')
            checksum, size, checkfile = clist
            rejmsg.append("Cannot parse checksum line [%s]" % (line))
        if not files.has_key(checkfile):
        # TODO: check for the file's entry in the original files dict, not
        # the one modified by (auto)byhand and other weird stuff
        # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
        # (file, hashname, where))
        # NOTE: string comparison -- both sides originate from parsed
        # control data, not os.stat().
        if not files[checkfile]["size"] == size:
            rejmsg.append("%s: size differs for files and checksums-%s entry "\
                "in %s" % (checkfile, hashname, where))
        files[checkfile][hash_key(hashname)] = checksum
    for f in files.keys():
        if not files[f].has_key(hash_key(hashname)):
            # NOTE(review): this reports `checkfile` (the last checksum line
            # parsed above) rather than `f`, the file actually missing the
            # entry -- looks like a bug; confirm and use `f` instead.
            rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
521 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
    """Parse a Files-style `field` of a parsed .changes/.dsc dict into a
    per-filename dict carrying size/section/priority/component plus the
    entry's `hashname` digest.

    @raise NoFilesFieldError: if `field` is absent
    @raise ParseChangesError: on an unparseable entry line
    """
    # Make sure we have a Files: field to parse...
    if not changes.has_key(field):
        raise NoFilesFieldError

    # Validate .changes Format: field
    validate_changes_format(parse_format(changes['format']), field)

    # .dsc Files entries carry no section/priority columns.
    includes_section = (not is_a_dsc) and field == "files"

    # Parse each entry/line:
    for i in changes[field].split('\n'):
        section = priority = ""
            (md5, size, section, priority, name) = s
            (md5, size, name) = s
            raise ParseChangesError, i

        (section, component) = extract_component_from_section(section)

        files[name] = Dict(size=size, section=section,
                           priority=priority, component=component)
        files[name][hashname] = md5
563 ################################################################################
def send_mail (message, filename=""):
    """sendmail wrapper, takes _either_ a message string or a file as arguments"""

    # If we've been passed a string dump it into a temporary file
        (fd, filename) = tempfile.mkstemp()
        os.write (fd, message)

    # Optionally filter the recipient headers against the whitelist
    # configured in Dinstall::MailWhiteList.
    if Cnf.has_key("Dinstall::MailWhiteList") and \
       Cnf["Dinstall::MailWhiteList"] != "":
        message_in = open_file(filename)
        message_raw = modemail.message_from_file(message_in)

        whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
            for line in whitelist_in:
                if not re_whitespace_comment.match(line):
                    if re_re_mark.match(line):
                        # Lines starting with the RE marker are regexes...
                        whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
                        # ...anything else is matched literally.
                        whitelist.append(re.compile(re.escape(line.strip())))

        # Fields to check.
        fields = ["To", "Bcc", "Cc"]
            # Check each field
            value = message_raw.get(field, None)
                for item in value.split(","):
                    (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
                    if not mail_whitelisted:
                        print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])

                # Doesn't have any mail in whitelist so remove the header
                    del message_raw[field]
                    message_raw.replace_header(field, string.join(match, ", "))

        # Change message fields in order if we don't have a To header
        if not message_raw.has_key("To"):
                if message_raw.has_key(field):
                    message_raw[fields[-1]] = message_raw[field]
                    del message_raw[field]
                # Clean up any temporary files
                # and return, as we removed all recipients.
                    os.unlink (filename);

        # Rewrite the (possibly filtered) message back to the file.
        fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
        os.write (fd, message_raw.as_string(True));

    # Invoke sendmail
    (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
        raise SendmailFailedError, output

    # Clean up any temporary files
641 # Clean up any temporary files
645 ################################################################################
def poolify (source, component):
    # Return the pool subdirectory for `source` within `component`:
    # "lib*" packages hash on the first four characters, everything
    # else on the first character.
    if source[:3] == "lib":
        return component + source[:4] + '/' + source + '/'
        return component + source[:1] + '/' + source + '/'
655 ################################################################################
def move (src, dest, overwrite = 0, perms = 0664):
    # Copy `src` to `dest` (creating the target directory mode 02775 if
    # needed), chmod it, then remove the source (removal elided in this
    # view); refuses to clobber unless `overwrite` is set.
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Moving %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
            fubar("Can't move %s to %s - file already exists." % (src, dest))
            if not os.access(dest, os.W_OK):
                fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
def copy (src, dest, overwrite = 0, perms = 0664):
    # Like move() but leaves `src` in place and raises exceptions
    # (FileExistsError / CantOverwriteError) instead of calling fubar().
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Copying %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
            raise FileExistsError
            if not os.access(dest, os.W_OK):
                raise CantOverwriteError
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
702 ################################################################################
    # Body of the host-identity helper (its `def` line is not visible in
    # this view): prefer a per-host DatabaseHostname from the config.
    res = socket.gethostbyaddr(socket.gethostname())
    database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
    if database_hostname:
        return database_hostname
def which_conf_file ():
    # Return the dak config file to use for this host.
    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # NOTE(review): os.path.join() discards `homedir` because the second
        # component is absolute, so confpath is always "/etc/dak.conf".
        # Probably meant homedir + "/etc/dak.conf"; confirm before changing.
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            # NOTE(review): reads the site-wide default_config here rather
            # than `confpath` -- looks unintended; confirm.
            apt_pkg.ReadConfigFileISC(Cnf,default_config)

    # We are still in here, so there is no local config file or we do
    # not allow local files. Do the normal stuff.
    if Cnf.get("Config::" + res[0] + "::DakConfig"):
        return Cnf["Config::" + res[0] + "::DakConfig"]

        return default_config
def which_apt_conf_file ():
    # Return the apt config file to use for this host (same shape as
    # which_conf_file()).
    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # NOTE(review): os.path.join() with an absolute second component
        # discards `homedir` -- confpath is always "/etc/dak.conf"; confirm.
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            apt_pkg.ReadConfigFileISC(Cnf,default_config)

    if Cnf.get("Config::" + res[0] + "::AptConfig"):
        return Cnf["Config::" + res[0] + "::AptConfig"]

        return default_apt_config
def which_alias_file():
    # Per-host forward-alias file, if one exists for this machine.
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
    if os.path.exists(aliasfn):
750 ################################################################################
def TemplateSubst(map, filename):
    """ Perform a substitution of template """
    # NOTE: `map` shadows the builtin of the same name; its keys are the
    # literal placeholder strings, replaced below by their str()-ed values.
    templatefile = open_file(filename)
    template = templatefile.read()
        template = template.replace(x, str(map[x]))
761 ################################################################################
def fubar(msg, exit_code=1):
    # Print a fatal error to stderr (the sys.exit call is elided in
    # this view).
    sys.stderr.write("E: %s\n" % (msg))

    # Body of warn() -- its `def` line is not visible in this view.
    sys.stderr.write("W: %s\n" % (msg))
770 ################################################################################
# Returns the user name with a laughable attempt at rfc822 conformancy
# (read: removing stray periods).
    # Full name from the passwd GECOS field, periods stripped
    # (enclosing def not visible in this view).
    return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
    # Login name of the invoking user (enclosing def not visible here).
    return pwd.getpwuid(os.getuid())[0]
780 ################################################################################
790 return ("%d%s" % (c, t))
792 ################################################################################
def cc_fix_changes (changes):
    # Normalise the space-separated Architecture field into a dict of
    # arch -> 1 so changes_compare() can test membership cheaply.
    o = changes.get("architecture", "")
        del changes["architecture"]
    changes["architecture"] = {}
        changes["architecture"][j] = 1
def changes_compare (a, b):
    """ Sort by source name, source version, 'have source', and then by filename """
    # Unparseable .changes files sort deterministically to one end.
        a_changes = parse_changes(a)

        b_changes = parse_changes(b)

    cc_fix_changes (a_changes)
    cc_fix_changes (b_changes)

    # Sort by source name
    a_source = a_changes.get("source")
    b_source = b_changes.get("source")
    q = cmp (a_source, b_source)

    # Sort by source version
    a_version = a_changes.get("version", "0")
    b_version = b_changes.get("version", "0")
    q = apt_pkg.VersionCompare(a_version, b_version)

    # Sort by 'have source'
    a_has_source = a_changes["architecture"].get("source")
    b_has_source = b_changes["architecture"].get("source")
    if a_has_source and not b_has_source:
    elif b_has_source and not a_has_source:

    # Fall back to sort by filename
def find_next_free (dest, too_many=100):
    # Append ".1", ".2", ... to `dest` until a non-existing path is
    # found; give up after `too_many` attempts.
    while os.path.exists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
    if extra >= too_many:
        raise NoFreeFilenameError
854 ################################################################################
def result_join (original, sep = '\t'):
    # Join a result row with `sep`, rendering None entries as the
    # empty string.
    for i in xrange(len(original)):
        if original[i] == None:
            resultlist.append("")
            resultlist.append(original[i])
    return sep.join(resultlist)
865 ################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    # Prepend `prefix` to every line of `str` (which shadows the builtin);
    # blank lines are dropped unless include_blank_lines is set.
    for line in str.split('\n'):
        if line or include_blank_lines:
            out += "%s%s\n" % (prefix, line)
    # Strip trailing new line
878 ################################################################################
def validate_changes_file_arg(filename, require_changes=1):
    """
    'filename' is either a .changes or .dak file.  If 'filename' is a
    .dak file, it's changed to be the corresponding .changes file.  The
    function then checks if the .changes file a) exists and b) is
    readable and returns the .changes filename if so.  If there's a
    problem, the next action depends on the option 'require_changes'
    (which defaults to 1).

      - If 'require_changes' == -1, errors are ignored and the .changes
        filename is returned.
      - If 'require_changes' == 0, a warning is given and 'None' is returned.
      - If 'require_changes' == 1, a fatal error is raised.

    """
    orig_filename = filename
    if filename.endswith(".dak"):
        filename = filename[:-4]+".changes"

    if not filename.endswith(".changes"):
        error = "invalid file type; not a changes file"
        if not os.access(filename,os.R_OK):
            if os.path.exists(filename):
                error = "permission denied"
                error = "file not found"

        if require_changes == 1:
            fubar("%s: %s." % (orig_filename, error))
        elif require_changes == 0:
            warn("Skipping %s - %s" % (orig_filename, error))
        else: # We only care about the .dak file
    # Tail of an architecture predicate (def line not visible in this
    # view): true only for concrete binary architectures, i.e. neither
    # "source" nor "all".
    return (arch != "source" and arch != "all")
926 ################################################################################
def join_with_commas_and(list):
    """Render a list as English prose, e.g. ["a", "b", "c"] -> "a, b and c".

    An empty list yields "nothing"; a singleton yields its sole element.
    """
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    head = ", ".join(list[:-1])
    return head + " and " + list[-1]
933 ################################################################################
    # Body of a dependency pretty-printer (def line not visible in this
    # view): each atom is (package, version, constraint); alternatives
    # are joined with " |".
        (pkg, version, constraint) = atom
            pp_dep = "%s (%s %s)" % (pkg, constraint, version)
        pp_deps.append(pp_dep)
    return " |".join(pp_deps)
946 ################################################################################
951 ################################################################################
def parse_args(Options):
    """ Handle -a, -c and -s arguments; returns them as SQL constraints """
    # XXX: This should go away and everything which calls it be converted
    #      to use SQLA properly.  For now, we'll just fix it not to use
    #      the old Pg interface though
    session = DBConn().session()
    # Process suite
        for suitename in split_args(Options["Suite"]):
            suite = get_suite(suitename, session=session)
            if suite.suite_id is None:
                warn("suite '%s' not recognised." % (suite.suite_name))
                suite_ids_list.append(suite.suite_id)
            con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
            fubar("No valid suite given.")

    # Process component
    if Options["Component"]:
        component_ids_list = []
        for componentname in split_args(Options["Component"]):
            component = get_component(componentname, session=session)
            if component is None:
                warn("component '%s' not recognised." % (componentname))
                component_ids_list.append(component.component_id)
        if component_ids_list:
            con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
            fubar("No valid component given.")

    # Process architecture
    con_architectures = ""
    if Options["Architecture"]:
        for archname in split_args(Options["Architecture"]):
            # "source" is tracked via the check_source flag, not an id.
            if archname == "source":
                arch = get_architecture(archname, session=session)
                    warn("architecture '%s' not recognised." % (archname))
                    arch_ids_list.append(arch.arch_id)
            con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
            if not check_source:
                fubar("No valid architecture given.")

    return (con_suites, con_architectures, con_components, check_source)
1015 ################################################################################
# Inspired(tm) by Bryn Keller's print_exc_plus (See
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
    # Body of the extended traceback printer (def line not visible in
    # this view): walk to the innermost frame, print the stock
    # traceback, then dump each frame's locals.
    tb = sys.exc_info()[2]
        frame = frame.f_back
    traceback.print_exc()
        print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
                                               frame.f_code.co_filename,
        for key, value in frame.f_locals.items():
            print "\t%20s = " % key,
                print "<unable to print>"
1042 ################################################################################
# Invoke `function`, with extra diagnostics on failure (the body of this
# wrapper is not visible in this view).
def try_with_debug(function):
1052 ################################################################################
def arch_compare_sw (a, b):
    """
    Function for use in sorting lists of architectures.

    Sorts normally except that 'source' dominates all others.
    """
    if a == "source" and b == "source":
def split_args (s, dwim=1):
    """
    Split command line arguments which can be separated by either commas
    or whitespace.  If dwim is set, it will complain about string ending
    in comma since this usually means someone did 'dak ls -a i386, m68k
    foo' or something and the inevitable confusion resulting from 'm68k'
    being treated as an argument is undesirable.
    """
    # No comma at all -> whitespace-separated.
    if s.find(",") == -1:
        if s[-1:] == "," and dwim:
            fubar("split_args: found trailing comma, spurious space maybe?")
1088 ################################################################################
def Dict(**kwargs):
    """Legacy helper: build a dict from keyword arguments.

    Equivalent to the dict(key=value, ...) constructor; kept for existing
    callers.  The catch-all parameter is renamed from ``dict`` to
    ``kwargs`` so it no longer shadows the builtin type (the name of a
    ``**`` collector is purely internal, so callers are unaffected).
    """
    return kwargs
1092 ########################################
def gpgv_get_status_output(cmd, status_read, status_write):
    """
    Our very own version of commands.getouputstatus(), hacked to support
    gpgv's status fd.
    """
    # Fork/exec `cmd` via /bin/sh, keeping `status_write` open in the
    # child so gpgv can report on its status fd; returns the combined
    # stdout/stderr, the status-fd text and the exit status.
    cmd = ['/bin/sh', '-c', cmd]
    p2cread, p2cwrite = os.pipe()
    c2pread, c2pwrite = os.pipe()
    errout, errin = os.pipe()
        # Child: close every inherited fd except gpgv's status fd.
        for i in range(3, 256):
            if i != status_write:
            os.execvp(cmd[0], cmd)

    # Parent
    os.dup2(c2pread, c2pwrite)
    os.dup2(errout, errin)

    output = status = ""
        # Multiplex the child's output and status fds until both go quiet.
        i, o, e = select.select([c2pwrite, errin, status_read], [], [])
            r = os.read(fd, 8196)
                more_data.append(fd)
                if fd == c2pwrite or fd == errin:
                elif fd == status_read:
                    fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
            # No more data pending: reap the child and close our ends.
            pid, exit_status = os.waitpid(pid, 0)
                os.close(status_write)
                os.close(status_read)

    return output, status, exit_status
1159 ################################################################################
def process_gpgv_output(status):
    # Process the status-fd output
    # Turn gpgv's "[GNUPG:] KEYWORD args..." lines into a keyword -> args
    # dict, accumulating any malformed-line complaints in internal_error.
    for line in status.split('\n'):
        split = line.split()
            internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
        # These keywords legitimately repeat; any other duplicate is an error.
        if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
        keywords[keyword] = args
    return (keywords, internal_error)
1186 ################################################################################
def retrieve_key (filename, keyserver=None, keyring=None):
    """
    Retrieve the key that signed 'filename' from 'keyserver' and
    add it to 'keyring'.  Returns nothing on success, or an error message
    on failure.
    """

    # Defaults for keyserver and keyring
        keyserver = Cnf["Dinstall::KeyServer"]
        keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(filename):
        return "%s: tainted filename" % (filename)

    # Invoke gpgv on the file
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
    (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)
        return internal_error

    # With an empty keyring, gpgv must report NO_PUBKEY -- that line
    # carries the fingerprint we need to fetch.
    if not keywords.has_key("NO_PUBKEY"):
        return "didn't find expected NO_PUBKEY in gpgv status-fd output"

    fingerprint = keywords["NO_PUBKEY"][0]
    # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
    # it'll try to create a lockfile in /dev. A better solution might
    # be a tempfile or something.
    cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
          % (Cnf["Dinstall::SigningKeyring"])
    cmd += " --keyring %s --keyserver %s --recv-key %s" \
           % (keyring, keyserver, fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
        return "'%s' failed with exit code %s" % (cmd, result)
1232 ################################################################################
def gpg_keyring_args(keyrings=None):
    # Render "--keyring <path>" arguments for gpg/gpgv; defaults to the
    # configured Dinstall::GPGKeyring list.
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    return " ".join(["--keyring %s" % x for x in keyrings])
1240 ################################################################################
def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None):
    """
    Check the signature of a file and return the fingerprint if the
    signature is valid, or 'None' if it's not.  The first argument is the
    filename whose signature should be checked.  The second argument is
    optional and is the name of the file the detached signature applies
    to.  The third argument is optional and is a *list* of keyrings to
    use.  'autofetch' can either be None, True or False.  If None, the
    default behaviour specified in the config (Dinstall::KeyAutoFetch)
    will be used.

    @rtype: tuple
    @return: (fingerprint, []) when the signature is valid, otherwise
             (None, [reject message, ...])
    """

    rejects = []
    fingerprint = None

    # Ensure the filenames contain no shell meta-characters or other
    # badness: they are interpolated into a command line below.
    if not re_taint_free.match(sig_filename):
        rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
        return (None, rejects)

    if data_filename and not re_taint_free.match(data_filename):
        rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
        return (None, rejects)

    if not keyrings:
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    # Autofetch the signing key if that's enabled
    if autofetch is None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    if autofetch:
        error_msg = retrieve_key(sig_filename)
        if error_msg:
            rejects.append(error_msg)
            return (None, rejects)

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    if internal_error:
        rejects.append("internal error while performing signature check on %s." % (sig_filename))
        # Bug fix: these appends used to pass a second "" argument (left
        # over from the old reject(msg, prefix) callback API);
        # list.append() takes a single argument and raised TypeError here.
        rejects.append(internal_error)
        rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.")
        return (None, rejects)

    # Now check for obviously bad things in the processed output
    if "KEYREVOKED" in keywords:
        rejects.append("The key used to sign %s has been revoked." % (sig_filename))
    if "BADSIG" in keywords:
        rejects.append("bad signature on %s." % (sig_filename))
    if "ERRSIG" in keywords and "NO_PUBKEY" not in keywords:
        rejects.append("failed to check signature on %s." % (sig_filename))
    if "NO_PUBKEY" in keywords:
        args = keywords["NO_PUBKEY"]
        # Guard against gpgv emitting the keyword with no arguments,
        # which would otherwise leave 'key' unbound (NameError).
        key = args[0] if len(args) >= 1 else "unknown"
        rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if "BADARMOR" in keywords:
        rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if "NODATA" in keywords:
        rejects.append("no signature found in %s." % (sig_filename))
    if "EXPKEYSIG" in keywords:
        args = keywords["EXPKEYSIG"]
        key = args[0] if len(args) >= 1 else "unknown"
        rejects.append("Signature made by expired key 0x%s" % (key))
    if "KEYEXPIRED" in keywords and "GOODSIG" not in keywords:
        args = keywords["KEYEXPIRED"]
        expiredate = ""
        if len(args) >= 1:
            timestamp = args[0]
            if timestamp.count("T") == 0:
                # Numeric epoch timestamp; report an unparsable value
                # verbatim instead of crashing.
                try:
                    expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
                except ValueError:
                    expiredate = "unknown (%s)" % (timestamp)
            else:
                # Newer gpgv emits an ISO8601 date; use it as-is.
                expiredate = timestamp
        rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))

    if len(rejects) > 0:
        return (None, rejects)

    # Next check gpgv exited with a zero return code
    if exit_status:
        rejects.append("gpgv failed while checking %s." % (sig_filename))
        # Bug fix: same two-argument append() TypeError as above.
        if status.strip():
            rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "))
        else:
            rejects.append(prefix_multi_line_string(output, " [GPG output:] "))
        return (None, rejects)

    # Sanity check the good stuff we expect
    if "VALIDSIG" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
    else:
        args = keywords["VALIDSIG"]
        if len(args) < 1:
            rejects.append("internal error while checking signature on %s." % (sig_filename))
        else:
            fingerprint = args[0]
    if "GOODSIG" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if "SIG_ID" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

    # Finally ensure there's not something we don't recognise
    known_keywords = frozenset(["VALIDSIG", "SIG_ID", "GOODSIG", "BADSIG", "ERRSIG",
                                "SIGEXPIRED", "KEYREVOKED", "NO_PUBKEY", "BADARMOR",
                                "NODATA", "NOTATION_DATA", "NOTATION_NAME", "KEYEXPIRED"])

    for keyword in keywords.keys():
        if keyword not in known_keywords:
            rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))

    if len(rejects) > 0:
        return (None, rejects)

    return (fingerprint, [])
1374 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """
    Retrieve email addresses from gpg key uids for a given fingerprint.

    Results are memoized in key_uid_email_cache, so gpg is invoked at
    most once per fingerprint.

    @rtype: set
    @return: set of email addresses (empty if gpg fails or no uid matches)
    """
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses is not None:
        return addresses
    addresses = set()
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
          % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    # Only trust the output if gpg exited successfully.
    if result == 0:
        for l in output.split('\n'):
            m = re_gpg_uid.match(l)
            if m:
                addresses.add(m.group(1))
    key_uid_email_cache[fingerprint] = addresses
    return addresses
1393 ################################################################################
1395 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """
    Greedily word-wrap 'paragraph' into lines of at most 'max_length'
    characters, prepending 'prefix' to every continuation line.  Words
    longer than 'max_length' are emitted on a line of their own rather
    than being broken up.

    @rtype: string
    @return: the wrapped text
    """
    line = ""
    s = ""
    have_started = 0
    words = paragraph.split()

    for word in words:
        word_size = len(word)
        if word_size > max_length:
            # Over-long word: flush any pending line, then put the word
            # on its own line.
            if have_started:
                s += line + '\n' + prefix
            s += word + '\n' + prefix
            line = ""
            have_started = 1
            continue

        if have_started:
            new_length = len(line) + word_size + 1
            if new_length > max_length:
                # Word doesn't fit; flush the pending line and start a
                # new one with this word.
                s += line + '\n' + prefix
                line = word
            else:
                line += ' ' + word
        else:
            line = word
            have_started = 1

    # Flush whatever is left on the final line.
    if have_started:
        s += line

    return s
1426 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.

    Returns the fixed (relative) 'src'.
    """
    # Strip the leading root from both endpoints, then climb out of
    # dest's directory with one '../' per path component.
    stripped_src = src.replace(root, '', 1)
    stripped_dest = dest.replace(root, '', 1)
    depth = len(os.path.dirname(stripped_dest).split('/'))
    return ('../' * depth) + stripped_src
1439 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Securely create a unique temporary file and return it.

    If 'directory' is non-null, the file is created inside it.
    If 'prefix' is non-null, the filename starts with it (default "dak").
    If 'suffix' is non-null, the filename ends with it.

    Returns a pair (fd, name) of the open file descriptor and its path.
    """
    # tempfile.mkstemp takes (suffix, prefix, dir) in that order.
    (tfd, tname) = tempfile.mkstemp(suffix, prefix, directory)
    return (tfd, tname)
1453 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Securely create a unique temporary directory and return its path.

    If 'parent' is non-null, the directory is created inside it.
    If 'prefix' is non-null, the directory name starts with it (default "dak").
    If 'suffix' is non-null, the directory name ends with it.

    Returns the pathname of the new directory.
    """
    # tempfile.mkdtemp takes (suffix, prefix, dir) in that order.
    created = tempfile.mkdtemp(suffix, prefix, parent)
    return created
1467 ################################################################################
def is_email_alias(email):
    """
    Check whether the user part of 'email' is listed in the alias file.

    The alias file is parsed once and cached in the module-level
    alias_cache for subsequent calls.

    @rtype: boolean
    @return: True if the local part of the address is a known alias
    """
    global alias_cache
    if alias_cache is None:
        aliasfn = which_alias_file()
        alias_cache = set()
        if aliasfn:
            # Each alias file line is "name: target(s)"; we only need
            # the alias name.  Bug fix: close the file when done instead
            # of leaking the handle.
            fd = open(aliasfn)
            try:
                for l in fd:
                    alias_cache.add(l.split(':')[0])
            finally:
                fd.close()
    uid = email.split('@')[0]
    return uid in alias_cache
1481 ################################################################################
def get_changes_files(dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place).

    Returns a list of filenames.

    Note: calls fubar() (and hence exits) if the directory cannot be read.
    """
    try:
        # Much of the rest of p-u/p-a depends on being in the right place
        os.chdir(dir)
        changes_files = [x for x in os.listdir(dir) if x.endswith('.changes')]
    except OSError as e:
        fubar("Failed to read list from directory %s (%s)" % (dir, e))

    return changes_files
1500 ################################################################################
1504 Cnf = apt_pkg.newConfiguration()
1505 apt_pkg.ReadConfigFileISC(Cnf,default_config)
1507 if which_conf_file() != default_config:
1508 apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())