2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
40 import email as modemail
43 from dbconn import DBConn, get_architecture, get_component, get_suite
44 from dak_exceptions import *
45 from textutils import fix_maintainer
46 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
47 re_multi_line_field, re_srchasver, re_taint_free, \
48 re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource, \
51 from formats import parse_format, validate_changes_format
52 from srcformats import get_format_from_string
53 from collections import defaultdict
55 ################################################################################
# Paths of the static dak/apt configuration files, used when no override
# applies for the current host.
default_config = "/etc/dak/dak.conf"     #: default dak config, defines host properties
default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used

alias_cache = None       #: Cache for email alias checks
key_uid_email_cache = {} #: Cache for email addresses from gpg key uids

# (hashname, function, earliest_changes_version)
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
                ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
# Monkeypatch commands.getstatusoutput as it returns a "0" exit code in
# all situations under lenny's Python.

def dak_getstatusoutput(cmd):
    # Run cmd through the shell with stderr folded into stdout and collect
    # the full output.
    pipe = subprocess.Popen(cmd, shell=True, universal_newlines=True,
                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

    output = "".join(pipe.stdout.readlines())
    # NOTE(review): the exit-status handling and the return statement of
    # this function are elided in this view.

commands.getstatusoutput = dak_getstatusoutput
83 ################################################################################
    # NOTE(review): the enclosing def line (an HTML-escaping helper taking a
    # string `s`) is elided in this view.
    """ Escape html chars using the re_html_escaping / html_escaping tables. """
    return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
89 ################################################################################
def open_file(filename, mode='r'):
    """
    Open C{file}, return fileobject.

    @type filename: string
    @param filename: path/filename to open

    @type mode: string
    @param mode: open mode

    @rtype: fileobject
    @return: open fileobject

    @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
    """
    # NOTE(review): the surrounding try/except and the `return f` are elided
    # in this view; the raise below sits in the IOError handler.
    f = open(filename, mode)
    raise CantOpenError, filename
113 ################################################################################
def our_raw_input(prompt=""):
    # Prompt on stdout, then read a line from the user; the actual read and
    # its EOF (^D) handling are elided in this view — the message below is
    # printed from the EOF handler.
    sys.stdout.write(prompt)
    sys.stderr.write("\nUser interrupt (^D).\n")
126 ################################################################################
def extract_component_from_section(section):
    # Split a Section value into (section, component): sections containing
    # '/' carry an explicit component prefix.  Several lines of this
    # function are elided in this view.
    if section.find('/') != -1:
        component = section.split('/')[0]

    # Expand default component
    if Cnf.has_key("Component::%s" % section):

    return (section, component)
143 ################################################################################
def parse_deb822(contents, signing_rules=0):
    # Parse a deb822-style block of text (a .changes/.dsc body) into a dict
    # of lower-cased field names; also records the raw text under
    # "filecontents".  Many lines of this function are elided in this view,
    # so the control flow below is fragmentary.

    # Split the lines in the input, keeping the linebreaks.
    lines = contents.splitlines(True)
        raise ParseChangesError, "[Empty changes file]"

    # Reindex by line number so we can easily verify the format of
    # the file (elided).
            indexed_lines[index] = line[:-1]

    num_of_lines = len(indexed_lines.keys())
    while index < num_of_lines:
        line = indexed_lines[index]
        if signing_rules == 1:
                # Inside the signed data: expect the signature block next.
                if index > num_of_lines:
                    raise InvalidDscError, index
                line = indexed_lines[index]
                if not line.startswith("-----BEGIN PGP SIGNATURE"):
                    raise InvalidDscError, index
        if line.startswith("-----BEGIN PGP SIGNATURE"):
        if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
            if signing_rules == 1:
                # Skip the PGP header data up to the first blank line.
                while index < num_of_lines and line != "":
                    line = indexed_lines[index]
        # If we're not inside the signed data, don't process anything
        if signing_rules >= 0 and not inside_signature:
        slf = re_single_line_field.match(line)
            field = slf.groups()[0].lower()
            changes[field] = slf.groups()[1]
                changes[field] += '\n'
        mlf = re_multi_line_field.match(line)
                raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
            if first == 1 and changes[field] != "":
                changes[field] += '\n'
            changes[field] += mlf.groups()[0] + '\n'

    if signing_rules == 1 and inside_signature:
        raise InvalidDscError, index

    changes["filecontents"] = "".join(lines)

    if changes.has_key("source"):
        # Strip the source version in brackets from the source field,
        # put it in the "source-version" field instead.
        srcver = re_srchasver.search(changes["source"])
            changes["source"] = srcver.group(1)
            changes["source-version"] = srcver.group(2)

        raise ParseChangesError, error
233 ################################################################################
def parse_changes(filename, signing_rules=0):
    """
    Parses a changes file and returns a dictionary where each field is a
    key.  The mandatory first argument is the filename of the .changes
    file.

    signing_rules is an optional argument:

      - If signing_rules == -1, no signature is required.
      - If signing_rules == 0 (the default), a signature is required.
      - If signing_rules == 1, it turns on the same strict format checking
        as used when parsing a .dsc.

    The rules for (signing_rules == 1)-mode are:

      - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
        followed by any PGP header data and must end with a blank line.

      - The data section must end with a blank line and must be followed by
        "-----BEGIN PGP SIGNATURE-----".
    """

    changes_in = open_file(filename)
    content = changes_in.read()
        # Validate that the file is proper UTF-8 before parsing; the
        # try/except framing is elided in this view.
        unicode(content, 'utf-8')
        raise ChangesUnicodeError, "Changes file not proper utf-8"
    return parse_deb822(content, signing_rules)
266 ################################################################################
def hash_key(hashname):
    """Return the files-dict key under which a checksum of type
    'hashname' is stored (e.g. "md5" -> "md5sum")."""
    return hashname + 'sum'
271 ################################################################################
def create_hash(where, files, hashname, hashfunc):
    """
    create_hash extends the passed files dict with the given hash by
    iterating over all files on disk and passing them to the hashing
    function given.
    """
    # NOTE(review): the `try:`, the loop's `continue` and the return are
    # elided in this view.
    for f in files.keys():
            file_handle = open_file(f)
        except CantOpenError:
            rejmsg.append("Could not open file %s for checksumming" % (f))

        files[f][hash_key(hashname)] = hashfunc(file_handle)
293 ################################################################################
def check_hash(where, files, hashname, hashfunc):
    """
    check_hash checks the given hash in the files dict against the actual
    files on disk.  The hash values need to be present consistently in
    all file entries.  It does not modify its input in any way.
    """
    # NOTE(review): the `try:` framing, some closing parentheses of the
    # rejmsg.append() calls and the return are elided in this view.
    for f in files.keys():
            file_handle = open_file(f)

            # Check for the hash entry, to not trigger a KeyError.
            if not files[f].has_key(hash_key(hashname)):
                rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,

            # Actually check the hash for correctness.
            if hashfunc(file_handle) != files[f][hash_key(hashname)]:
                rejmsg.append("%s: %s check failed in %s" % (f, hashname,
        except CantOpenError:
            # TODO: This happens when the file is in the pool.
            # warn("Cannot open file %s" % f)
328 ################################################################################
def check_size(where, files):
    """
    check_size checks the file sizes in the passed files dict against the
    actual files on disk (via os.stat; the stat call itself is elided in
    this view).
    """
    for f in files.keys():
            # TODO: This happens when the file is in the pool.
            actual_size = entry[stat.ST_SIZE]
        size = int(files[f]["size"])
        if size != actual_size:
            rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
                          % (f, actual_size, size, where))
353 ################################################################################
def check_dsc_files(dsc_filename, dsc=None, dsc_files=None):
    """
    Verify that the files listed in the Files field of the .dsc are
    those expected given the announced Format.

    @type dsc_filename: string
    @param dsc_filename: path of .dsc file

    @type dsc: dict
    @param dsc: the content of the .dsc parsed by C{parse_changes()}

    @type dsc_files: dict
    @param dsc_files: the file list returned by C{build_file_list()}

    @rtype: list
    @return: all errors detected
    """
    # Parse the file if needed
        dsc = parse_changes(dsc_filename, signing_rules=1);

    if dsc_files is None:
        dsc_files = build_file_list(dsc, is_a_dsc=1)

    # Ensure .dsc lists proper set of source files according to the format
    has = defaultdict(lambda: 0)

    # Lookup table: filename-suffix regex -> keys in 'has' to increment.
        (r'orig.tar.gz', ('orig_tar_gz', 'orig_tar')),
        (r'diff.gz', ('debian_diff',)),
        (r'tar.gz', ('native_tar_gz', 'native_tar')),
        (r'debian\.tar\.(gz|bz2)', ('debian_tar',)),
        (r'orig\.tar\.(gz|bz2)', ('orig_tar',)),
        (r'tar\.(gz|bz2)', ('native_tar',)),
        (r'orig-.+\.tar\.(gz|bz2)', ('more_orig_tar',)),

    for f in dsc_files.keys():
        m = re_issource.match(f)
            rejmsg.append("%s: %s in Files field not recognised as source."

        # Populate 'has' dictionary by resolving keys in lookup table
        for regex, keys in ftype_lookup:
            if re.match(regex, m.group(3)):

        # File does not match anything in lookup table; reject
            # NOTE(review): 'reject' is not defined anywhere visible in this
            # module — this looks like it should be rejmsg.append; confirm.
            reject("%s: unexpected source file '%s'" % (dsc_filename, f))

    # Check for multiple files
    for file_type in ('orig_tar', 'native_tar', 'debian_tar', 'debian_diff'):
        if has[file_type] > 1:
            rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))

    # Source format specific tests
        format = get_format_from_string(dsc['format'])
            '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)
    except UnknownFormatError:
        # Not an error here for now
433 ################################################################################
def check_hash_fields(what, manifest):
    """
    check_hash_fields ensures that there are no checksum fields in the
    given dict that we do not know about.
    """
    # NOTE(review): the rejmsg initialisation and return are elided here.
    hashes = map(lambda x: x[0], known_hashes)
    for field in manifest:
        if field.startswith("checksums-"):
            hashname = field.split("-",1)[1]
            if hashname not in hashes:
                rejmsg.append("Unsupported checksum field for %s "\
                              "in %s" % (hashname, what))
451 ################################################################################
def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
    # Ensure the given hash is available for every file in the .changes:
    # new-enough changes formats carry the checksum field, older ones need
    # the hash computed from disk.  Several lines are elided in this view.
    if format >= version:
        # The version should contain the specified hash.

        # Import hashes from the changes
        rejmsg = parse_checksums(".changes", files, changes, hashname)
        # We need to calculate the hash because it can't possibly
        # be present in a changes file of this (older) format.
        return func(".changes", files, hashname, hashfunc)

# We could add the orig which might be in the pool to the files dict to
# access the checksums easily.
def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
    """
    ensure_dsc_hashes' task is to ensure that each and every *present* hash
    in the dsc is correct, i.e. identical to the changes file and if necessary
    the pool.  The latter task is delegated to check_hash.
    """
    # NOTE(review): the rejmsg initialisation, early-return framing and the
    # final return are elided in this view.
    if not dsc.has_key('Checksums-%s' % (hashname,)):
    # Import hashes from the dsc
    parse_checksums(".dsc", dsc_files, dsc, hashname)

    rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
487 ################################################################################
def parse_checksums(where, files, manifest, hashname):
    # Parse the Checksums-<hashname> field of `manifest` and record each
    # checksum into the matching entry of `files`; report size mismatches
    # and files missing a checksum entry.  Several lines (rejmsg init,
    # try/except framing, continue statements, return) are elided here.
    field = 'checksums-%s' % hashname
    if not field in manifest:
    for line in manifest[field].split('\n'):
        clist = line.strip().split(' ')
            checksum, size, checkfile = clist
            rejmsg.append("Cannot parse checksum line [%s]" % (line))
        if not files.has_key(checkfile):
            # TODO: check for the file's entry in the original files dict, not
            # the one modified by (auto)byhand and other weird stuff
            # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
            # (file, hashname, where))
        if not files[checkfile]["size"] == size:
            rejmsg.append("%s: size differs for files and checksums-%s entry "\
                          "in %s" % (checkfile, hashname, where))
        files[checkfile][hash_key(hashname)] = checksum
    for f in files.keys():
        if not files[f].has_key(hash_key(hashname)):
            # NOTE(review): this message reports 'checkfile' (the last name
            # parsed above) instead of the loop variable 'f' — looks like a
            # bug; confirm before relying on the message text.
            rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
520 ################################################################################
# Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl

def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
    # Parse the Files (or checksums) field of a parsed .changes/.dsc into a
    # dict keyed by filename, carrying size/section/priority/component and
    # the named hash.  Several lines (files init, try/except framing, the
    # return) are elided in this view.

    # Make sure we have a Files: field to parse...
    if not changes.has_key(field):
        raise NoFilesFieldError

    # Validate .changes Format: field
        validate_changes_format(parse_format(changes['format']), field)

    includes_section = (not is_a_dsc) and field == "files"

    # Parse each entry/line:
    for i in changes[field].split('\n'):
        section = priority = ""
            # 5-tuple form (with section/priority) vs 3-tuple form.
            (md5, size, section, priority, name) = s
            (md5, size, name) = s
            raise ParseChangesError, i

            (section, component) = extract_component_from_section(section)

        files[name] = Dict(size=size, section=section,
                           priority=priority, component=component)
        files[name][hashname] = md5
564 ################################################################################
def send_mail (message, filename=""):
    """sendmail wrapper, takes _either_ a message string or a file as arguments"""
    # Applies the Dinstall::MailWhiteList filtering (if configured) before
    # piping the message to Dinstall::SendmailCommand.  Many lines of this
    # function are elided in this view.

    # If we've been passed a string dump it into a temporary file
        (fd, filename) = tempfile.mkstemp()
        os.write (fd, message)

    if Cnf.has_key("Dinstall::MailWhiteList") and \
       Cnf["Dinstall::MailWhiteList"] != "":
        message_in = open_file(filename)
        message_raw = modemail.message_from_file(message_in)

        # Read the whitelist: lines marked with re_re_mark become regexes,
        # anything else matches literally.
        whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
            for line in whitelist_in:
                if not re_whitespace_comment.match(line):
                    if re_re_mark.match(line):
                        whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
                        whitelist.append(re.compile(re.escape(line.strip())))

        # Filter the recipient headers against the whitelist.
        fields = ["To", "Bcc", "Cc"]
            value = message_raw.get(field, None)
                for item in value.split(","):
                    (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
                    if not mail_whitelisted:
                        print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])

                    # Doesn't have any mail in whitelist so remove the header
                        del message_raw[field]
                        message_raw.replace_header(field, string.join(match, ", "))

        # Change message fields in order if we don't have a To header
        if not message_raw.has_key("To"):
            if message_raw.has_key(field):
                message_raw[fields[-1]] = message_raw[field]
                del message_raw[field]

                # Clean up any temporary files
                # and return, as we removed all recipients.
                    os.unlink (filename);

            # Rewrite the (possibly filtered) message back to the file.
            fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
            os.write (fd, message_raw.as_string(True));

    # Invoke sendmail with the message on stdin.
    (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
        raise SendmailFailedError, output

    # Clean up any temporary files
646 ################################################################################
def poolify (source, component):
    # Return the pool sub-path for a source package within the component:
    # "lib*" packages pool under their first four letters, everything else
    # under the first letter.  (The `else:` line is elided in this view.)
    if source[:3] == "lib":
        return component + source[:4] + '/' + source + '/'
        return component + source[:1] + '/' + source + '/'
656 ################################################################################
def move (src, dest, overwrite = 0, perms = 0664):
    # Move src to dest: copy (preserving metadata), chmod to perms, then
    # remove the source (the unlink and several other lines are elided in
    # this view).  Destination directories are created with mode 02775.
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Moving %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
            fubar("Can't move %s to %s - file already exists." % (src, dest))
            if not os.access(dest, os.W_OK):
                fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
def copy (src, dest, overwrite = 0, perms = 0664):
    # Copy src to dest (preserving metadata) and chmod to perms; unlike
    # move() this raises on conflicts instead of calling fubar().  Several
    # lines are elided in this view.
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Copying %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
            raise FileExistsError
            if not os.access(dest, os.W_OK):
                raise CantOverwriteError
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
703 ################################################################################
    # NOTE(review): the enclosing def line of this helper is elided in this
    # view; it looks up a per-host DatabaseHostname override from config.
    res = socket.gethostbyaddr(socket.gethostname())
    database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
    if database_hostname:
        return database_hostname
def which_conf_file ():
    # Pick the dak config file for this host, honouring per-host config
    # overrides; falls back to default_config.
    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # NOTE(review): os.path.join discards 'homedir' because the second
        # argument is absolute — likely meant "etc/dak.conf"; confirm.
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            apt_pkg.ReadConfigFileISC(Cnf,default_config)

    # We are still in here, so there is no local config file or we do
    # not allow local files. Do the normal stuff.
    if Cnf.get("Config::" + res[0] + "::DakConfig"):
        return Cnf["Config::" + res[0] + "::DakConfig"]

    return default_config
def which_apt_conf_file ():
    # Same lookup as which_conf_file(), but for the apt configuration;
    # falls back to default_apt_config.
    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # NOTE(review): as in which_conf_file(), the absolute second argument
        # makes os.path.join ignore 'homedir' — confirm intent.
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            apt_pkg.ReadConfigFileISC(Cnf,default_config)

    if Cnf.get("Config::" + res[0] + "::AptConfig"):
        return Cnf["Config::" + res[0] + "::AptConfig"]

    return default_apt_config
def which_alias_file():
    # Return the host-specific forward-alias file path when it exists
    # (the return statements are elided in this view).
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
    if os.path.exists(aliasfn):
751 ################################################################################
def TemplateSubst(map, filename):
    """ Perform a substitution of template: replace each key of 'map' found
    in the template file with str(map[key]).  (The loop header and the
    return are elided in this view.) """
    templatefile = open_file(filename)
    template = templatefile.read()
        template = template.replace(x, str(map[x]))
762 ################################################################################
def fubar(msg, exit_code=1):
    # Print a fatal error; the sys.exit(exit_code) call is elided here.
    sys.stderr.write("E: %s\n" % (msg))

    # NOTE(review): the line below belongs to warn(msg), whose def line is
    # elided in this view.
    sys.stderr.write("W: %s\n" % (msg))
771 ################################################################################
# Returns the user name with a laughable attempt at rfc822 conformancy
# (read: removing stray periods).
    # NOTE(review): the def line of this GECOS-based name helper is elided.
    return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')

    # NOTE(review): the def line of this login-name helper is elided.
    return pwd.getpwuid(os.getuid())[0]
781 ################################################################################
791 return ("%d%s" % (c, t))
793 ################################################################################
def cc_fix_changes (changes):
    # Canonicalise the Architecture field: replace the string value with a
    # dict mapping each architecture name to 1.  (The split of 'o' and the
    # loop header binding 'j' are elided in this view.)
    o = changes.get("architecture", "")
        del changes["architecture"]
    changes["architecture"] = {}
        changes["architecture"][j] = 1
def changes_compare (a, b):
    """ Sort by source name, source version, 'have source', and then by filename """
    # NOTE(review): try/except framing around the parses and the early/final
    # returns are elided in this view.
        a_changes = parse_changes(a)
        b_changes = parse_changes(b)

    cc_fix_changes (a_changes)
    cc_fix_changes (b_changes)

    # Sort by source name
    a_source = a_changes.get("source")
    b_source = b_changes.get("source")
    q = cmp (a_source, b_source)

    # Sort by source version
    a_version = a_changes.get("version", "0")
    b_version = b_changes.get("version", "0")
    q = apt_pkg.VersionCompare(a_version, b_version)

    # Sort by 'have source'
    a_has_source = a_changes["architecture"].get("source")
    b_has_source = b_changes["architecture"].get("source")
    if a_has_source and not b_has_source:
    elif b_has_source and not a_has_source:

    # Fall back to sort by filename
843 ################################################################################
def find_next_free (dest, too_many=100):
    # Find a free filename by appending '.1', '.2', ... to dest, giving up
    # after too_many attempts.  (Initialisation of 'extra'/'orig_dest', the
    # counter increment and the return are elided in this view.)
    while os.path.exists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
    if extra >= too_many:
        raise NoFreeFilenameError
855 ################################################################################
def result_join (original, sep = '\t'):
    # Join a row of values with sep, substituting "" for None entries.
    # (The resultlist initialisation and the else: line are elided here.)
    for i in xrange(len(original)):
        if original[i] == None:
            resultlist.append("")
            resultlist.append(original[i])
    return sep.join(resultlist)
866 ################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    # Prefix every line of 'str' with 'prefix'; blank lines are kept only
    # when include_blank_lines is set.  (The accumulator initialisation and
    # the final strip/return are elided in this view.)
    for line in str.split('\n'):
        if line or include_blank_lines:
            out += "%s%s\n" % (prefix, line)
    # Strip trailing new line
879 ################################################################################
def validate_changes_file_arg(filename, require_changes=1):
    """
    'filename' is either a .changes or .dak file.  If 'filename' is a
    .dak file, it's changed to be the corresponding .changes file.  The
    function then checks if the .changes file a) exists and b) is
    readable and returns the .changes filename if so.  If there's a
    problem, the next action depends on the option 'require_changes':

      - If 'require_changes' == -1, errors are ignored and the .changes
        filename is returned.
      - If 'require_changes' == 0, a warning is given and 'None' is returned.
      - If 'require_changes' == 1, a fatal error is raised.
    """
    # NOTE(review): several lines (error init, else branches, returns) are
    # elided in this view.
    orig_filename = filename
    if filename.endswith(".dak"):
        filename = filename[:-4]+".changes"

    if not filename.endswith(".changes"):
        error = "invalid file type; not a changes file"
    if not os.access(filename,os.R_OK):
        if os.path.exists(filename):
            error = "permission denied"
            error = "file not found"
        if require_changes == 1:
            fubar("%s: %s." % (orig_filename, error))
        elif require_changes == 0:
            warn("Skipping %s - %s" % (orig_filename, error))
    else: # We only care about the .dak file
922 ################################################################################
925 return (arch != "source" and arch != "all")
927 ################################################################################
def join_with_commas_and(list):
    """Render a list as an English enumeration: 'a, b and c'.

    An empty list yields the string "nothing"; a single element is
    returned unchanged."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    head = ", ".join(list[:-1])
    return head + " and " + list[-1]
934 ################################################################################
        # NOTE(review): fragment of a dependency pretty-printer whose def
        # line and loop header are elided; each atom is rendered as
        # "pkg (constraint version)" and alternatives joined with " |".
        (pkg, version, constraint) = atom
            pp_dep = "%s (%s %s)" % (pkg, constraint, version)
        pp_deps.append(pp_dep)
    return " |".join(pp_deps)
947 ################################################################################
952 ################################################################################
def parse_args(Options):
    """ Handle -a, -c and -s arguments; returns them as SQL constraints """
    # XXX: This should go away and everything which calls it be converted
    # to use SQLA properly. For now, we'll just fix it not to use
    # the old Pg interface though
    # NOTE(review): several lines (the Suite/check_source framing, list
    # initialisations, else branches) are elided in this view.
    session = DBConn().session()
        for suitename in split_args(Options["Suite"]):
            suite = get_suite(suitename, session=session)
            # NOTE(review): get_component below is guarded with
            # `if component is None`, but here an unknown suite name would
            # make `suite` None and this attribute access raise — confirm.
            if suite.suite_id is None:
                warn("suite '%s' not recognised." % (suite.suite_name))
                suite_ids_list.append(suite.suite_id)
            con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
            fubar("No valid suite given.")

    if Options["Component"]:
        component_ids_list = []
        for componentname in split_args(Options["Component"]):
            component = get_component(componentname, session=session)
            if component is None:
                warn("component '%s' not recognised." % (componentname))
                component_ids_list.append(component.component_id)
        if component_ids_list:
            con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
            fubar("No valid component given.")

    # Process architecture
    con_architectures = ""
    if Options["Architecture"]:
        for archname in split_args(Options["Architecture"]):
            if archname == "source":
                arch = get_architecture(archname, session=session)
                warn("architecture '%s' not recognised." % (archname))
                arch_ids_list.append(arch.arch_id)
            con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
        if not check_source:
            fubar("No valid architecture given.")

    return (con_suites, con_architectures, con_components, check_source)
1016 ################################################################################
# Inspired(tm) by Bryn Keller's print_exc_plus (See
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)

    # NOTE(review): the def line of this traceback-dumping helper is elided;
    # it walks to the innermost frame and prints each frame's locals.
    tb = sys.exc_info()[2]
        frame = frame.f_back
    traceback.print_exc()
        print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
                                               frame.f_code.co_filename,
        for key, value in frame.f_locals.items():
            print "\t%20s = " % key,
                print "<unable to print>"
1043 ################################################################################
1045 def try_with_debug(function):
1053 ################################################################################
def arch_compare_sw (a, b):
    """
    Function for use in sorting lists of architectures.

    Sorts normally except that 'source' dominates all others.
    """
    # NOTE(review): the remaining comparison branches are elided here.
    if a == "source" and b == "source":
1071 ################################################################################
def split_args (s, dwim=1):
    """
    Split command line arguments which can be separated by either commas
    or whitespace.  If dwim is set, it will complain about string ending
    in comma since this usually means someone did 'dak ls -a i386, m68k
    foo' or something and the inevitable confusion resulting from 'm68k'
    being treated as an argument is undesirable.
    """
    # NOTE(review): the whitespace-split return and the comma-split return
    # are elided in this view.
    if s.find(",") == -1:
        if s[-1:] == "," and dwim:
            fubar("split_args: found trailing comma, spurious space maybe?")
1089 ################################################################################
1091 def Dict(**dict): return dict
1093 ########################################
def gpgv_get_status_output(cmd, status_read, status_write):
    """
    Our very own version of commands.getstatusoutput(), hacked to support
    gpgv's status file descriptor.  Several lines (the fork, fd closing,
    the select loop framing) are elided in this view.
    """
    cmd = ['/bin/sh', '-c', cmd]
    p2cread, p2cwrite = os.pipe()
    c2pread, c2pwrite = os.pipe()
    errout, errin = os.pipe()

    # Child: close every fd except the status-write fd, then exec the shell.
        for i in range(3, 256):
            if i != status_write:
        os.execvp(cmd[0], cmd)

    # Parent: redirect the pipe ends we read from.
    os.dup2(c2pread, c2pwrite)
    os.dup2(errout, errin)

    output = status = ""
        # Multiplex across stdout/stderr and the gpgv status fd.
        i, o, e = select.select([c2pwrite, errin, status_read], [], [])
            r = os.read(fd, 8196)
                more_data.append(fd)
                if fd == c2pwrite or fd == errin:
                elif fd == status_read:
                    fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))

    pid, exit_status = os.waitpid(pid, 0)

    os.close(status_write)
    os.close(status_read)

    return output, status, exit_status
1160 ################################################################################
def process_gpgv_output(status):
    # Process the status-fd output: parse "[GNUPG:] KEYWORD args..." lines
    # into a keyword -> args dict, accumulating malformed-line complaints
    # into internal_error.  Initialisations and continue statements are
    # elided in this view.
    for line in status.split('\n'):
        split = line.split()
            internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
        # Some tokens legitimately repeat; only flag unexpected duplicates.
        if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
        keywords[keyword] = args

    return (keywords, internal_error)
1187 ################################################################################
def retrieve_key (filename, keyserver=None, keyring=None):
    """
    Retrieve the key that signed 'filename' from 'keyserver' and
    add it to 'keyring'.  Returns nothing on success, or an error message
    on failure.  Several lines (default-argument guards, error-return
    framing) are elided in this view.
    """

    # Defaults for keyserver and keyring
        keyserver = Cnf["Dinstall::KeyServer"]
        keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(filename):
        return "%s: tainted filename" % (filename)

    # Invoke gpgv on the file with an empty keyring so it reports NO_PUBKEY.
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
    (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)
        return internal_error

    if not keywords.has_key("NO_PUBKEY"):
        return "didn't find expected NO_PUBKEY in gpgv status-fd output"

    fingerprint = keywords["NO_PUBKEY"][0]
    # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
    # it'll try to create a lockfile in /dev. A better solution might
    # be a tempfile or something.
    cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
          % (Cnf["Dinstall::SigningKeyring"])
    cmd += " --keyring %s --keyserver %s --recv-key %s" \
           % (keyring, keyserver, fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
        return "'%s' failed with exit code %s" % (cmd, result)
1233 ################################################################################
def gpg_keyring_args(keyrings=None):
    # Render a list of keyring paths as gpg "--keyring <path>" arguments;
    # the guard that defaults 'keyrings' is elided in this view.
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    return " ".join(["--keyring %s" % x for x in keyrings])
1241 ################################################################################
1243 def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None):
1245 Check the signature of a file and return the fingerprint if the
1246 signature is valid or 'None' if it's not. The first argument is the
1247 filename whose signature should be checked. The second argument is a
1248 reject function and is called when an error is found. The reject()
1249 function must allow for two arguments: the first is the error message,
1250 the second is an optional prefix string. It's possible for reject()
1251 to be called more than once during an invocation of check_signature().
1252 The third argument is optional and is the name of the files the
1253 detached signature applies to. The fourth argument is optional and is
1254 a *list* of keyrings to use. 'autofetch' can either be None, True or
1255 False. If None, the default behaviour specified in the config will be
# NOTE(review): the code below returns 2-tuples (fingerprint, rejects),
# so the docstring's talk of a reject() callback looks stale.  Several
# lines of this function are elided in this view (e.g. the initialisation
# of 'rejects' and the guards around some branches) — confirm against VCS.
1261 # Ensure the filename contains no shell meta-characters or other badness
1262 if not re_taint_free.match(sig_filename):
1263 rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
1264 return (None, rejects)
1266 if data_filename and not re_taint_free.match(data_filename):
1267 rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
1268 return (None, rejects)
# Fall back to the keyrings configured in dak.conf when the caller
# supplied none (the guarding condition is elided in this view).
1271 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1273 # Autofetch the signing key if that's enabled
1274 if autofetch == None:
1275 autofetch = Cnf.get("Dinstall::KeyAutoFetch")
1277 error_msg = retrieve_key(sig_filename)
1279 rejects.append(error_msg)
1280 return (None, rejects)
1282 # Build the command line
# The status-fd pipe carries gpgv's machine-readable verdict separately
# from its human-readable output.
1283 status_read, status_write = os.pipe()
1284 cmd = "gpgv --status-fd %s %s %s %s" % (
1285 status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)
1287 # Invoke gpgv on the file
1288 (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)
1290 # Process the status-fd output
1291 (keywords, internal_error) = process_gpgv_output(status)
1293 # If we failed to parse the status-fd output, let's just whine and bail now
1295 rejects.append("internal error while performing signature check on %s." % (sig_filename))
# NOTE(review): list.append() takes exactly one argument — the two calls
# below would raise TypeError if reached; they look like leftovers from
# the old reject(msg, prefix) callback API.  Confirm and fix upstream.
1296 rejects.append(internal_error, "")
1297 rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.", "")
1298 return (None, rejects)
1300 # Now check for obviously bad things in the processed output
1301 if keywords.has_key("KEYREVOKED"):
1302 rejects.append("The key used to sign %s has been revoked." % (sig_filename))
1303 if keywords.has_key("BADSIG"):
1304 rejects.append("bad signature on %s." % (sig_filename))
# ERRSIG alone means verification failed; with NO_PUBKEY we give the
# more specific missing-key message below instead.
1305 if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
1306 rejects.append("failed to check signature on %s." % (sig_filename))
1307 if keywords.has_key("NO_PUBKEY"):
1308 args = keywords["NO_PUBKEY"]
1311 rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
1312 if keywords.has_key("BADARMOR"):
1313 rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
1314 if keywords.has_key("NODATA"):
1315 rejects.append("no signature found in %s." % (sig_filename))
1316 if keywords.has_key("EXPKEYSIG"):
1317 args = keywords["EXPKEYSIG"]
1320 rejects.append("Signature made by expired key 0x%s" % (key))
# KEYEXPIRED without a GOODSIG means the expiry invalidated the
# signature; format the expiry date readably for the reject message.
1321 if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
1322 args = keywords["KEYEXPIRED"]
# The timestamp is either an epoch value or an ISO8601 string
# containing "T"; only the epoch form goes through time.gmtime.
1326 if timestamp.count("T") == 0:
1328 expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
1330 expiredate = "unknown (%s)" % (timestamp)
1332 expiredate = timestamp
1333 rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))
1335 if len(rejects) > 0:
1336 return (None, rejects)
1338 # Next check gpgv exited with a zero return code
1340 rejects.append("gpgv failed while checking %s." % (sig_filename))
# NOTE(review): same two-argument append problem as above.
1342 rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
1344 rejects.append(prefix_multi_line_string(output, " [GPG output:] "), "")
1345 return (None, rejects)
1347 # Sanity check the good stuff we expect
1348 if not keywords.has_key("VALIDSIG"):
1349 rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
1351 args = keywords["VALIDSIG"]
1353 rejects.append("internal error while checking signature on %s." % (sig_filename))
# First VALIDSIG field is the signing key's fingerprint (per the gpgv
# status-fd format).
1355 fingerprint = args[0]
1356 if not keywords.has_key("GOODSIG"):
1357 rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
1358 if not keywords.has_key("SIG_ID"):
1359 rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))
1361 # Finally ensure there's not something we don't recognise
1362 known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
1363 SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
1364 NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")
1366 for keyword in keywords.keys():
1367 if not known_keywords.has_key(keyword):
1368 rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
1370 if len(rejects) > 0:
1371 return (None, rejects)
# Success: every check passed and nothing was rejected.
1373 return (fingerprint, [])
1375 ################################################################################
1377 def gpg_get_key_addresses(fingerprint):
1378 """Retrieve email addresses from gpg key uids for a given fingerprint."""
# Results are memoised in the module-level key_uid_email_cache dict.
1379 addresses = key_uid_email_cache.get(fingerprint)
1380 if addresses != None:
# (cache-hit return and the initialisation of 'addresses' as a set are
# elided in this view — confirm against VCS)
1383 cmd = "gpg --no-default-keyring %s --fingerprint %s" \
1384 % (gpg_keyring_args(), fingerprint)
1385 (result, output) = commands.getstatusoutput(cmd)
# Scan gpg's output for uid lines and collect the email addresses
# matched by re_gpg_uid.
1387 for l in output.split('\n'):
1388 m = re_gpg_uid.match(l)
1390 addresses.add(m.group(1))
# Populate the cache before (the elided) return of 'addresses'.
1391 key_uid_email_cache[fingerprint] = addresses
1394 ################################################################################
1396 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
1398 def wrap(paragraph, max_length, prefix=""):
# Word-wrap 'paragraph' to at most 'max_length' columns, prefixing each
# continuation line with 'prefix'.
# NOTE(review): several lines are elided in this view (the docstring,
# the 's'/'line' accumulator setup, the loop header over 'words' and the
# tail flush) — the comments below only describe the visible logic.
1402 words = paragraph.split()
1405 word_size = len(word)
# A single word longer than the limit is emitted on its own line rather
# than being split mid-word.
1406 if word_size > max_length:
1408 s += line + '\n' + prefix
1409 s += word + '\n' + prefix
# +1 accounts for the space that would join the word onto 'line'.
1412 new_length = len(line) + word_size + 1
1413 if new_length > max_length:
# The current line is full: flush it and start a fresh one.
1414 s += line + '\n' + prefix
1427 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
    """
    # Strip the leading 'root' from both endpoints.
    rel_src = src.replace(root, '', 1)
    rel_dest_dir = os.path.dirname(dest.replace(root, '', 1))
    # Climb out of dest's directory: one "../" per path component.
    ascent = '../' * len(rel_dest_dir.split('/'))
    return ascent + rel_src
1440 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Pre-create a secure and unique temporary file.

    If 'directory' is non-null, the file is pre-created inside it.
    If 'prefix' is non-null, the filename starts with it (default "dak").
    If 'suffix' is non-null, the filename ends with it.

    Returns a pair (fd, name).
    """
    # tempfile.mkstemp takes (suffix, prefix, dir) — note the order.
    fd, path = tempfile.mkstemp(suffix, prefix, directory)
    return (fd, path)
1454 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Pre-create a secure and unique temporary directory.

    If 'parent' is non-null, the directory is pre-created inside it.
    If 'prefix' is non-null, the name starts with it (default "dak").
    If 'suffix' is non-null, the name ends with it.

    Returns the pathname of the new directory.
    """
    # tempfile.mkdtemp takes (suffix, prefix, dir) — note the order.
    path = tempfile.mkdtemp(suffix, prefix, parent)
    return path
1468 ################################################################################
1470 def is_email_alias(email):
1471 """ checks if the user part of the email is listed in the alias file """
# alias_cache is a module-level cache, built lazily on first call.
1473 if alias_cache == None:
1474 aliasfn = which_alias_file()
# (the initialisation of alias_cache as a set and any guard for a
# missing alias file are elided in this view — confirm against VCS)
# The alias file uses "name: target" lines; only the name part matters.
1477 for l in open(aliasfn):
1478 alias_cache.add(l.split(':')[0])
# Compare only the local part (before '@') against the cached aliases.
1479 uid = email.split('@')[0]
1480 return uid in alias_cache
1482 ################################################################################
1484 def get_changes_files(dir):
1486 Takes a directory and lists all .changes files in it (as well as chdir'ing
1487 to the directory; this is due to broken behaviour on the part of p-u/p-a
1488 when you're not in the right place)
1490 Returns a list of filenames
1493 # Much of the rest of p-u/p-a depends on being in the right place
# (the chdir call and the try/except wrapping the listing are elided in
# this view; 'e' below is presumably the caught OSError — confirm)
1495 changes_files = [x for x in os.listdir(dir) if x.endswith('.changes')]
# fubar() presumably reports the error and aborts — verify; otherwise
# this line would raise NameError on 'e' outside an except clause.
1497 fubar("Failed to read list from directory %s (%s)" % (dir, e))
1499 return changes_files
1501 ################################################################################
# Bootstrap the global dak configuration: load the defaults from
# default_config, then overlay the host-specific file when
# which_conf_file() points somewhere else.
1505 Cnf = apt_pkg.newConfiguration()
1506 apt_pkg.ReadConfigFileISC(Cnf,default_config)
1508 if which_conf_file() != default_config:
1509 apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1511 ###############################################################################
1513 def ensure_orig_files(changes, dest_dir, session):
1515 Ensure that dest_dir contains all the orig tarballs for the specified
1516 changes. If it does not, symlink them into place.
1518 Returns a 2-tuple (already_exists, symlinked) containing a list of files
1519 that were already there and a list of files that were symlinked into place.
# NOTE(review): the missing-id error path below returns a plain string,
# not the documented 2-tuple — callers must cope with both.  Several
# lines (the 'continue's after the skip checks and the fetch of the
# query result 'res') are elided in this view; confirm against VCS.
1522 exists, symlinked = [], []
1524 for dsc_file in changes.dsc_files:
1526 # Skip all files that are not orig tarballs
1527 if not re_is_orig_source.match(dsc_file):
1530 # Skip orig files not identified in the pool
1531 if not (dsc_file in changes.orig_files and
1532 'id' in changes.orig_files[dsc_file]):
1535 dest = os.path.join(dest_dir, dsc_file)
# Already present in dest_dir: record it (elided lines presumably
# append to 'exists' and continue) rather than symlinking again.
1537 if os.path.exists(dest):
# Resolve the tarball's pool location from its files-table id.
1541 orig_file_id = changes.orig_files[dsc_file]['id']
1543 c = session.execute(
1544 'SELECT l.path, f.filename FROM location l, files f WHERE f.id = :id and f.location = l.id',
1545 {'id': orig_file_id}
1550 return "[INTERNAL ERROR] Couldn't find id %s in files table." % orig_file_id
# Symlink the pool copy into dest_dir and record it.
1552 src = os.path.join(res[0], res[1])
1553 os.symlink(src, dest)
1554 symlinked.append(dest)
1556 return (exists, symlinked)