2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
40 import email as modemail
43 from dbconn import DBConn, get_architecture, get_component, get_suite
44 from dak_exceptions import *
45 from textutils import fix_maintainer
46 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
47 re_multi_line_field, re_srchasver, re_taint_free, \
48 re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource, \
51 from formats import parse_format, validate_changes_format
52 from srcformats import get_format_from_string
53 from collections import defaultdict
55 ################################################################################
default_config = "/etc/dak/dak.conf"     #: default dak config, defines host properties
default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used
alias_cache = None        #: Cache for email alias checks
key_uid_email_cache = {}  #: Cache for email addresses from gpg key uids
# Hashes accepted in .changes/.dsc files, as
# (hashname, function, earliest_changes_version) tuples.
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
                ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
# Monkeypatch commands.getstatusoutput as it returns a "0" exit code in
# all situations under lenny's Python.
def dak_getstatusoutput(cmd):
    """commands.getstatusoutput() replacement reporting real exit codes.

    Runs cmd through the shell with stderr merged into stdout and returns
    the combined output (trailing newline stripped, like the original).
    """
    pipe = subprocess.Popen(cmd, shell=True, universal_newlines=True,
        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

    output = "".join(pipe.stdout.readlines())

    # Mirror commands.getstatusoutput: drop a single trailing newline.
    if output[-1:] == '\n':

commands.getstatusoutput = dak_getstatusoutput
86 ################################################################################
89 """ Escape html chars """
90 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
92 ################################################################################
def open_file(filename, mode='r'):
    """
    Open C{filename}, return fileobject.

    @type filename: string
    @param filename: path/filename to open

    @type mode: string
    @param mode: open mode

    @rtype: fileobject
    @return: open fileobject

    @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.

    """
    f = open(filename, mode)
    raise CantOpenError, filename
116 ################################################################################
def our_raw_input(prompt=""):
    """raw_input() wrapper; reports EOF (^D) as a user interrupt on stderr."""
    sys.stdout.write(prompt)

    sys.stderr.write("\nUser interrupt (^D).\n")
129 ################################################################################
def extract_component_from_section(section):
    """Split a "component/section" Section value; returns (section, component)."""
    if section.find('/') != -1:
        component = section.split('/')[0]

    # Expand default component
    if Cnf.has_key("Component::%s" % section):

    return (section, component)
146 ################################################################################
def parse_deb822(contents, signing_rules=0):
    """Parse deb822-format control text (.changes/.dsc) into a field dict.

    Field names are lower-cased keys; the raw input is preserved under
    "filecontents".  signing_rules follows parse_changes(): -1 no
    signature required, 0 (default) signature required, 1 strict
    .dsc-style format checking.
    """
    # Split the lines in the input, keeping the linebreaks.
    lines = contents.splitlines(True)
    raise ParseChangesError, "[Empty changes file]"

    # Reindex by line number so we can easily verify the format of
    indexed_lines[index] = line[:-1]

    num_of_lines = len(indexed_lines.keys())

    while index < num_of_lines:
        line = indexed_lines[index]
        if signing_rules == 1:
            # Strict mode: signed data must be followed directly by the
            # PGP signature block.
            if index > num_of_lines:
                raise InvalidDscError, index
            line = indexed_lines[index]
            if not line.startswith("-----BEGIN PGP SIGNATURE"):
                raise InvalidDscError, index
        if line.startswith("-----BEGIN PGP SIGNATURE"):
        if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
            if signing_rules == 1:
                # Skip PGP header data up to the first blank line.
                while index < num_of_lines and line != "":
                    line = indexed_lines[index]
        # If we're not inside the signed data, don't process anything
        if signing_rules >= 0 and not inside_signature:
        # "Field: value" lines start a new field...
        slf = re_single_line_field.match(line)
        field = slf.groups()[0].lower()
        changes[field] = slf.groups()[1]
        changes[field] += '\n'
        # ...continuation lines extend the current field.
        mlf = re_multi_line_field.match(line)
        raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
        if first == 1 and changes[field] != "":
            changes[field] += '\n'
        changes[field] += mlf.groups()[0] + '\n'

    if signing_rules == 1 and inside_signature:
        raise InvalidDscError, index

    changes["filecontents"] = "".join(lines)

    if changes.has_key("source"):
        # Strip the source version in brackets from the source field,
        # put it in the "source-version" field instead.
        srcver = re_srchasver.search(changes["source"])
        changes["source"] = srcver.group(1)
        changes["source-version"] = srcver.group(2)

    raise ParseChangesError, error
236 ################################################################################
def parse_changes(filename, signing_rules=0):
    """
    Parses a changes file and returns a dictionary where each field is a
    key.  The mandatory first argument is the filename of the .changes
    file.

    signing_rules is an optional argument:

      - If signing_rules == -1, no signature is required.
      - If signing_rules == 0 (the default), a signature is required.
      - If signing_rules == 1, it turns on the same strict format checking
        as dpkg-source.

    The rules for (signing_rules == 1)-mode are:

      - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
        followed by any PGP header data and must end with a blank line.

      - The data section must end with a blank line and must be followed by
        "-----BEGIN PGP SIGNATURE-----".

    @raise ChangesUnicodeError: if the file is not valid UTF-8.
    """

    changes_in = open_file(filename)
    content = changes_in.read()
    # Validate the whole file is UTF-8 before handing it to the parser.
    unicode(content, 'utf-8')
    raise ChangesUnicodeError, "Changes file not proper utf-8"
    return parse_deb822(content, signing_rules)
269 ################################################################################
def hash_key(hashname):
    """Return the files-dict key used to store the named hash.

    For example "sha1" -> "sha1sum", "md5" -> "md5sum".
    """
    return hashname + 'sum'
274 ################################################################################
def create_hash(where, files, hashname, hashfunc):
    """
    create_hash extends the passed files dict with the given hash by
    iterating over all files on disk and passing them to the hashing
    function given.  Returns a list of reject messages for files that
    could not be opened.
    """
    for f in files.keys():
        file_handle = open_file(f)
        except CantOpenError:
            rejmsg.append("Could not open file %s for checksumming" % (f))

        # Store the computed digest under e.g. "sha1sum".
        files[f][hash_key(hashname)] = hashfunc(file_handle)
296 ################################################################################
def check_hash(where, files, hashname, hashfunc):
    """
    check_hash checks the given hash in the files dict against the actual
    files on disk.  The hash values need to be present consistently in
    all file entries.  It does not modify its input in any way.
    """
    for f in files.keys():
        file_handle = open_file(f)

        # Check for the hash entry, to not trigger a KeyError.
        if not files[f].has_key(hash_key(hashname)):
            rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,

        # Actually check the hash for correctness.
        if hashfunc(file_handle) != files[f][hash_key(hashname)]:
            rejmsg.append("%s: %s check failed in %s" % (f, hashname,
        except CantOpenError:
            # TODO: This happens when the file is in the pool.
            # warn("Cannot open file %s" % f)
def check_size(where, files):
    """
    check_size checks the file sizes in the passed files dict against the
    actual sizes on disk.  Returns a list of reject messages.
    """
    for f in files.keys():
        # TODO: This happens when the file is in the pool.
        actual_size = entry[stat.ST_SIZE]
        size = int(files[f]["size"])
        if size != actual_size:
            rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
                          % (f, actual_size, size, where))
356 ################################################################################
def check_dsc_files(dsc_filename, dsc=None, dsc_files=None):
    """
    Verify that the files listed in the Files field of the .dsc are
    those expected given the announced Format.

    @type dsc_filename: string
    @param dsc_filename: path of .dsc file

    @type dsc: dict
    @param dsc: the content of the .dsc parsed by C{parse_changes()}

    @type dsc_files: dict
    @param dsc_files: the file list returned by C{build_file_list()}

    @rtype: list
    @return: all errors detected
    """
    # Parse the file if needed
    dsc = parse_changes(dsc_filename, signing_rules=1);

    if dsc_files is None:
        dsc_files = build_file_list(dsc, is_a_dsc=1)

    # Ensure .dsc lists proper set of source files according to the format
    # announced.  'has' counts how many files of each kind were seen.
    has = defaultdict(lambda: 0)

    # Lookup table: filename-suffix regex -> the has[] counters it bumps.
        (r'orig.tar.gz', ('orig_tar_gz', 'orig_tar')),
        (r'diff.gz', ('debian_diff',)),
        (r'tar.gz', ('native_tar_gz', 'native_tar')),
        (r'debian\.tar\.(gz|bz2)', ('debian_tar',)),
        (r'orig\.tar\.(gz|bz2)', ('orig_tar',)),
        (r'tar\.(gz|bz2)', ('native_tar',)),
        (r'orig-.+\.tar\.(gz|bz2)', ('more_orig_tar',)),

    for f in dsc_files.keys():
        m = re_issource.match(f)
        rejmsg.append("%s: %s in Files field not recognised as source."

        # Populate 'has' dictionary by resolving keys in lookup table
        for regex, keys in ftype_lookup:
            if re.match(regex, m.group(3)):
        # File does not match anything in lookup table; reject
        # NOTE(review): everything else here appends to rejmsg; 'reject' is
        # not defined in this module's visible scope -- confirm this should
        # not be rejmsg.append(...).
        reject("%s: unexpected source file '%s'" % (dsc_filename, f))

    # Check for multiple files
    for file_type in ('orig_tar', 'native_tar', 'debian_tar', 'debian_diff'):
        if has[file_type] > 1:
            rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))

    # Source format specific tests
    format = get_format_from_string(dsc['format'])
        '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)
    except UnknownFormatError:
        # Not an error here for now
def check_hash_fields(what, manifest):
    """
    check_hash_fields ensures that there are no checksum fields in the
    given dict that we do not know about.  Returns a list of reject
    messages, one per unsupported Checksums-* field.
    """
    # Names of the hashes we support (first element of each known_hashes
    # tuple, e.g. "sha1", "sha256").
    hashes = map(lambda x: x[0], known_hashes)
    for field in manifest:
        if field.startswith("checksums-"):
            hashname = field.split("-",1)[1]
            if hashname not in hashes:
                rejmsg.append("Unsupported checksum field for %s "\
                    "in %s" % (hashname, what))
454 ################################################################################
def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
    # Ensure the named hash is available for every file in the .changes:
    # for new-enough Format versions it must be present in the file itself;
    # otherwise it has to be computed from disk.
    if format >= version:
        # The version should contain the specified hash.

        # Import hashes from the changes
        rejmsg = parse_checksums(".changes", files, changes, hashname)

    # We need to calculate the hash because it can't possibly be present
    # in a .changes of this Format version.
    return func(".changes", files, hashname, hashfunc)
471 # We could add the orig which might be in the pool to the files dict to
472 # access the checksums easily.
def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
    """
    ensure_dsc_hashes' task is to ensure that each and every *present* hash
    in the dsc is correct, i.e. identical to the changes file and if necessary
    the pool.  The latter task is delegated to check_hash.
    """
    # Nothing to do when the .dsc does not carry this Checksums-* field.
    if not dsc.has_key('Checksums-%s' % (hashname,)):

    # Import hashes from the dsc
    parse_checksums(".dsc", dsc_files, dsc, hashname)

    rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
490 ################################################################################
def parse_checksums(where, files, manifest, hashname):
    """Import a Checksums-<hashname> field from 'manifest' into 'files'.

    Each line of the field is "<checksum> <size> <filename>".  The parsed
    checksum is stored into files[filename][hash_key(hashname)].

    @param where: label used in reject messages (e.g. ".dsc", ".changes")
    @param files: per-filename dict (as built by build_file_list); mutated
    @param manifest: parsed control dict containing the checksums field
    @param hashname: hash name, e.g. "sha1"
    @return: list of reject messages (empty when everything checks out)
    """
    rejmsg = []
    field = 'checksums-%s' % hashname
    if field not in manifest:
        return rejmsg
    for line in manifest[field].split('\n'):
        if not line:
            break
        clist = line.strip().split(' ')
        if len(clist) == 3:
            checksum, size, checkfile = clist
        else:
            rejmsg.append("Cannot parse checksum line [%s]" % (line))
            continue
        if checkfile not in files:
            # TODO: check for the file's entry in the original files dict, not
            # the one modified by (auto)byhand and other weird stuff
            # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
            #     (checkfile, hashname, where))
            continue
        # Sizes are compared as strings, matching how both were parsed.
        if files[checkfile]["size"] != size:
            rejmsg.append("%s: size differs for files and checksums-%s entry "\
                "in %s" % (checkfile, hashname, where))
        files[checkfile][hash_key(hashname)] = checksum
    # Every file must have received an entry for this hash.
    for f in files.keys():
        if hash_key(hashname) not in files[f]:
            # Bug fix: report the file that is actually missing the entry
            # (previously this reported 'checkfile', the last line parsed).
            rejmsg.append("%s: no entry in checksums-%s in %s" % (f,
                hashname, where))
    return rejmsg
525 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
    """Parse a Files-style field into a dict keyed by filename.

    Each entry carries size, section, priority, component and the named
    hash.  @raise NoFilesFieldError: when the field is absent.
    """
    # Make sure we have a Files: field to parse...
    if not changes.has_key(field):
        raise NoFilesFieldError

    # Validate .changes Format: field
    validate_changes_format(parse_format(changes['format']), field)

    # .dsc Files entries have no section/priority columns.
    includes_section = (not is_a_dsc) and field == "files"

    # Parse each entry/line:
    for i in changes[field].split('\n'):
        section = priority = ""
        (md5, size, section, priority, name) = s
        (md5, size, name) = s
        raise ParseChangesError, i

        (section, component) = extract_component_from_section(section)

        files[name] = Dict(size=size, section=section,
                           priority=priority, component=component)
        files[name][hashname] = md5
567 ################################################################################
def send_mail (message, filename=""):
    """sendmail wrapper, takes _either_ a message string or a file as arguments"""

    # If we've been passed a string dump it into a temporary file
    (fd, filename) = tempfile.mkstemp()
    os.write (fd, message)

    # If configured, filter recipient headers through the whitelist.
    if Cnf.has_key("Dinstall::MailWhiteList") and \
       Cnf["Dinstall::MailWhiteList"] != "":
        message_in = open_file(filename)
        message_raw = modemail.message_from_file(message_in)

        whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
        for line in whitelist_in:
            if not re_whitespace_comment.match(line):
                if re_re_mark.match(line):
                    # Whitelist lines carrying the RE mark are regexes...
                    whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
                    # ...all others are literal addresses.
                    whitelist.append(re.compile(re.escape(line.strip())))

        # Walk over the recipient headers, keeping only whitelisted mails.
        fields = ["To", "Bcc", "Cc"]
        value = message_raw.get(field, None)
        for item in value.split(","):
            (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
        if not mail_whitelisted:
            print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])

        # Doesn't have any mail in whitelist so remove the header
        del message_raw[field]
        message_raw.replace_header(field, string.join(match, ", "))

        # Change message fields in order if we don't have a To header
        if not message_raw.has_key("To"):
            if message_raw.has_key(field):
                message_raw[fields[-1]] = message_raw[field]
                del message_raw[field]

        # Clean up any temporary files
        # and return, as we removed all recipients.
        os.unlink (filename);

        # Rewrite the temporary file with the filtered message.
        fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
        os.write (fd, message_raw.as_string(True));

    # Invoke sendmail
    (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
    raise SendmailFailedError, output

    # Clean up any temporary files
649 ################################################################################
def poolify (source, component):
    """Return the pool subdirectory path for 'source' within 'component'.

    lib* packages pool under their four-character prefix (e.g. "libf/libfoo/"),
    everything else under the first character (e.g. "f/foo/").
    NOTE(review): assumes 'component' already ends with '/' (or is empty) --
    confirm at the callers.
    """
    if source[:3] == "lib":
        return component + source[:4] + '/' + source + '/'
    return component + source[:1] + '/' + source + '/'
659 ################################################################################
def move (src, dest, overwrite = 0, perms = 0664):
    """Move src to dest (copy2 + chmod), creating the target dir if needed."""
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        # Create the directory group-writable/setgid regardless of umask.
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Moving %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
            fubar("Can't move %s to %s - file already exists." % (src, dest))
            if not os.access(dest, os.W_OK):
                fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
def copy (src, dest, overwrite = 0, perms = 0664):
    """Copy src to dest (copy2 + chmod), creating the target dir if needed.

    Unlike move(), failure to overwrite raises FileExistsError /
    CantOverwriteError instead of calling fubar().
    """
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        # Create the directory group-writable/setgid regardless of umask.
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Copying %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
            raise FileExistsError
            if not os.access(dest, os.W_OK):
                raise CantOverwriteError
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
706 ################################################################################
709 res = socket.gethostbyaddr(socket.gethostname())
710 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
711 if database_hostname:
712 return database_hostname
def which_conf_file ():
    """Return the dak config path: $DAK_CONFIG, a per-host config, or the default."""
    if os.getenv("DAK_CONFIG"):
        print(os.getenv("DAK_CONFIG"))
        return os.getenv("DAK_CONFIG")

    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # NOTE(review): os.path.join discards 'homedir' because the second
        # argument is absolute -- this always yields "/etc/dak.conf", not a
        # per-user path.  Probably meant a path relative to $HOME; confirm.
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            apt_pkg.ReadConfigFileISC(Cnf,default_config)

    # We are still in here, so there is no local config file or we do
    # not allow local files. Do the normal stuff.
    if Cnf.get("Config::" + res[0] + "::DakConfig"):
        return Cnf["Config::" + res[0] + "::DakConfig"]

    return default_config
def which_apt_conf_file ():
    """Return the apt config path: a per-host config or the default."""
    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # NOTE(review): same flaw as which_conf_file() -- os.path.join
        # discards 'homedir' for an absolute second argument.
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            apt_pkg.ReadConfigFileISC(Cnf,default_config)

    if Cnf.get("Config::" + res[0] + "::AptConfig"):
        return Cnf["Config::" + res[0] + "::AptConfig"]

    return default_apt_config
def which_alias_file():
    """Return this host's forward-alias file path, if it exists."""
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
    if os.path.exists(aliasfn):
758 ################################################################################
def TemplateSubst(map, filename):
    """ Perform a substitution of template: replace each key of 'map'
    occurring in the template file with its (stringified) value. """
    templatefile = open_file(filename)
    template = templatefile.read()
    template = template.replace(x, str(map[x]))
769 ################################################################################
def fubar(msg, exit_code=1):
    """Print an error ("E: ...") to stderr and terminate with exit_code."""
    sys.stderr.write("E: %s\n" % (msg))

    # NOTE(review): the line below is the body of warn(msg) (its def line
    # is not in view); it emits a "W: "-prefixed warning to stderr.
    sys.stderr.write("W: %s\n" % (msg))
778 ################################################################################
780 # Returns the user name with a laughable attempt at rfc822 conformancy
781 # (read: removing stray periods).
783 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
786 return pwd.getpwuid(os.getuid())[0]
788 ################################################################################
798 return ("%d%s" % (c, t))
800 ################################################################################
def cc_fix_changes (changes):
    """Convert the 'architecture' string field into a dict keyed by arch."""
    o = changes.get("architecture", "")
    del changes["architecture"]
    changes["architecture"] = {}
    changes["architecture"][j] = 1
def changes_compare (a, b):
    """ Sort by source name, source version, 'have source', and then by filename """
    a_changes = parse_changes(a)
    b_changes = parse_changes(b)

    cc_fix_changes (a_changes)
    cc_fix_changes (b_changes)

    # Sort by source name
    a_source = a_changes.get("source")
    b_source = b_changes.get("source")
    q = cmp (a_source, b_source)

    # Sort by source version
    a_version = a_changes.get("version", "0")
    b_version = b_changes.get("version", "0")
    q = apt_pkg.VersionCompare(a_version, b_version)

    # Sort by 'have source': entries with source sort after those without.
    a_has_source = a_changes["architecture"].get("source")
    b_has_source = b_changes["architecture"].get("source")
    if a_has_source and not b_has_source:
    elif b_has_source and not a_has_source:

    # Fall back to sort by filename
850 ################################################################################
def find_next_free (dest, too_many=100):
    """Return dest, or dest plus a numeric suffix, that does not yet exist.

    @raise NoFreeFilenameError: after too_many attempts.
    """
    while os.path.exists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)

    if extra >= too_many:
        raise NoFreeFilenameError
862 ################################################################################
def result_join (original, sep = '\t'):
    """Join a sequence with sep, rendering None entries as empty strings."""
    for i in xrange(len(original)):
        if original[i] == None:
            resultlist.append("")
            # (else branch) keep the value as-is
            resultlist.append(original[i])
    return sep.join(resultlist)
873 ################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    """Prepend prefix to every line of str; blank lines only when requested."""
    for line in str.split('\n'):
        if line or include_blank_lines:
            out += "%s%s\n" % (prefix, line)
    # Strip trailing new line
886 ################################################################################
def validate_changes_file_arg(filename, require_changes=1):
    """
    'filename' is either a .changes or .dak file.  If 'filename' is a
    .dak file, it's changed to be the corresponding .changes file.  The
    function then checks if the .changes file a) exists and b) is
    readable and returns the .changes filename if so.  If there's a
    problem, the next action depends on the option 'require_changes'
    argument:

      - If 'require_changes' == -1, errors are ignored and the .changes
        filename is returned.
      - If 'require_changes' == 0, a warning is given and 'None' is returned.
      - If 'require_changes' == 1, a fatal error is raised.

    """

    orig_filename = filename
    if filename.endswith(".dak"):
        filename = filename[:-4]+".changes"

    if not filename.endswith(".changes"):
        error = "invalid file type; not a changes file"
    if not os.access(filename,os.R_OK):
        if os.path.exists(filename):
            error = "permission denied"
            # (else branch)
            error = "file not found"

    if require_changes == 1:
        fubar("%s: %s." % (orig_filename, error))
    elif require_changes == 0:
        warn("Skipping %s - %s" % (orig_filename, error))
    else: # We only care about the .dak file
929 ################################################################################
932 return (arch != "source" and arch != "all")
934 ################################################################################
def join_with_commas_and(list):
    """Render a sequence as English prose.

    [] -> "nothing", ["a"] -> "a", ["a","b"] -> "a and b",
    ["a","b","c"] -> "a, b and c".
    """
    if not list:
        return "nothing"
    head, last = list[:-1], list[-1]
    if not head:
        return last
    return "%s and %s" % (", ".join(head), last)
941 ################################################################################
946 (pkg, version, constraint) = atom
948 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
951 pp_deps.append(pp_dep)
952 return " |".join(pp_deps)
954 ################################################################################
959 ################################################################################
def parse_args(Options):
    """ Handle -a, -c and -s arguments; returns them as SQL constraints """
    # XXX: This should go away and everything which calls it be converted
    #      to use SQLA properly.  For now, we'll just fix it not to use
    #      the old Pg interface though
    session = DBConn().session()

    # Process suite
    for suitename in split_args(Options["Suite"]):
        suite = get_suite(suitename, session=session)
        # NOTE(review): if get_suite() returns None for an unknown suite,
        # 'suite.suite_id' raises AttributeError -- compare the component
        # branch below, which checks 'component is None' first.  Confirm.
        if suite.suite_id is None:
            warn("suite '%s' not recognised." % (suite.suite_name))
            suite_ids_list.append(suite.suite_id)
        con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
        fubar("No valid suite given.")

    # Process component
    if Options["Component"]:
        component_ids_list = []
        for componentname in split_args(Options["Component"]):
            component = get_component(componentname, session=session)
            if component is None:
                warn("component '%s' not recognised." % (componentname))
                component_ids_list.append(component.component_id)
        if component_ids_list:
            con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
            fubar("No valid component given.")

    # Process architecture
    con_architectures = ""
    if Options["Architecture"]:
        for archname in split_args(Options["Architecture"]):
            if archname == "source":
            arch = get_architecture(archname, session=session)
            warn("architecture '%s' not recognised." % (archname))
            arch_ids_list.append(arch.arch_id)
        con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])

    if not check_source:
        fubar("No valid architecture given.")

    return (con_suites, con_architectures, con_components, check_source)
1023 ################################################################################
1025 # Inspired(tm) by Bryn Keller's print_exc_plus (See
1026 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
1029 tb = sys.exc_info()[2]
1036 frame = frame.f_back
1038 traceback.print_exc()
1040 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
1041 frame.f_code.co_filename,
1043 for key, value in frame.f_locals.items():
1044 print "\t%20s = " % key,
1048 print "<unable to print>"
1050 ################################################################################
1052 def try_with_debug(function):
1060 ################################################################################
def arch_compare_sw (a, b):
    """
    Function for use in sorting lists of architectures.

    Sorts normally except that 'source' dominates all others.
    """
    if a == "source" and b == "source":
1078 ################################################################################
def split_args (s, dwim=1):
    """
    Split command line arguments which can be separated by either commas
    or whitespace.  If dwim is set, it will complain about string ending
    in comma since this usually means someone did 'dak ls -a i386, m68k
    foo' or something and the inevitable confusion resulting from 'm68k'
    being treated as an argument is undesirable.
    """
    # No comma -> whitespace-separated; otherwise comma-separated.
    if s.find(",") == -1:

    if s[-1:] == "," and dwim:
        fubar("split_args: found trailing comma, spurious space maybe?")
1096 ################################################################################
def Dict(**kwargs):
    """Convenience dict constructor: Dict(a=1, b=2) -> {'a': 1, 'b': 2}."""
    return kwargs
1100 ########################################
def gpgv_get_status_output(cmd, status_read, status_write):
    """
    Our very own version of commands.getouputstatus(), hacked to support
    gpgv's status fd.  Returns (output, status, exit_status) where 'status'
    is the raw data read from gpgv's --status-fd.
    """
    cmd = ['/bin/sh', '-c', cmd]
    p2cread, p2cwrite = os.pipe()
    c2pread, c2pwrite = os.pipe()
    errout, errin = os.pipe()

    # Child: close every inherited fd except the status fd, then exec.
    for i in range(3, 256):
        if i != status_write:
    os.execvp(cmd[0], cmd)

    # Parent side of the pipes.
    os.dup2(c2pread, c2pwrite)
    os.dup2(errout, errin)

    output = status = ""
    # Multiplex reads from command output, stderr and the status fd.
    i, o, e = select.select([c2pwrite, errin, status_read], [], [])
    r = os.read(fd, 8196)
    more_data.append(fd)
    if fd == c2pwrite or fd == errin:
    elif fd == status_read:
    fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))

    pid, exit_status = os.waitpid(pid, 0)

    os.close(status_write)
    os.close(status_read)

    return output, status, exit_status
1167 ################################################################################
def process_gpgv_output(status):
    """Parse gpgv --status-fd data into ({keyword: args}, internal_error).

    internal_error is a non-empty string when the status data was malformed.
    """
    # Process the status-fd output
    for line in status.split('\n'):
        split = line.split()
        internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
        # These keywords may legitimately appear more than once per run.
        if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
        keywords[keyword] = args

    return (keywords, internal_error)
1194 ################################################################################
def retrieve_key (filename, keyserver=None, keyring=None):
    """
    Retrieve the key that signed 'filename' from 'keyserver' and
    add it to 'keyring'.  Returns nothing on success, or an error message
    string on failure.
    """
    # Defaults for keyserver and keyring
    keyserver = Cnf["Dinstall::KeyServer"]
    keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(filename):
        return "%s: tainted filename" % (filename)

    # Invoke gpgv on the file with an empty keyring to provoke NO_PUBKEY.
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
    (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)
    return internal_error

    if not keywords.has_key("NO_PUBKEY"):
        return "didn't find expected NO_PUBKEY in gpgv status-fd output"

    fingerprint = keywords["NO_PUBKEY"][0]
    # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
    # it'll try to create a lockfile in /dev. A better solution might
    # be a tempfile or something.
    cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
          % (Cnf["Dinstall::SigningKeyring"])
    cmd += " --keyring %s --keyserver %s --recv-key %s" \
           % (keyring, keyserver, fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    return "'%s' failed with exit code %s" % (cmd, result)
1240 ################################################################################
def gpg_keyring_args(keyrings=None):
    """Return a "--keyring X --keyring Y ..." string for the given
    keyrings (falling back to the configured Dinstall::GPGKeyring)."""
    keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    return " ".join(["--keyring %s" % x for x in keyrings])
1248 ################################################################################
def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None):
    """
    Check the signature of a file and return the fingerprint if the
    signature is valid or 'None' if it's not.  The first argument is the
    filename whose signature should be checked.  The second argument is
    optional and is the name of the files the detached signature applies
    to.  The third argument is optional and is a *list* of keyrings to
    use.  'autofetch' can either be None, True or False.  If None, the
    default behaviour specified in the config will be used.

    Returns a tuple (fingerprint, rejects): on success 'fingerprint' is
    the signing key's fingerprint and 'rejects' is an empty list; on
    failure 'fingerprint' is None and 'rejects' holds the error messages.
    """

    rejects = []

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(sig_filename):
        rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
        return (None, rejects)

    if data_filename and not re_taint_free.match(data_filename):
        rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
        return (None, rejects)

    if not keyrings:
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    # Autofetch the signing key if that's enabled
    if autofetch == None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    if autofetch:
        error_msg = retrieve_key(sig_filename)
        if error_msg:
            rejects.append(error_msg)
            return (None, rejects)

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    if internal_error:
        rejects.append("internal error while performing signature check on %s." % (sig_filename))
        # BUGFIX: list.append() takes exactly one argument; these calls
        # used to pass a second prefix argument left over from the old
        # reject() API and raised TypeError whenever this path was hit.
        rejects.append(internal_error)
        rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.")
        return (None, rejects)

    # Now check for obviously bad things in the processed output
    if keywords.has_key("KEYREVOKED"):
        rejects.append("The key used to sign %s has been revoked." % (sig_filename))
    if keywords.has_key("BADSIG"):
        rejects.append("bad signature on %s." % (sig_filename))
    if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
        rejects.append("failed to check signature on %s." % (sig_filename))
    if keywords.has_key("NO_PUBKEY"):
        args = keywords["NO_PUBKEY"]
        # Guard against gpgv emitting the token with no arguments so we
        # don't reference an unbound name below.
        key = None
        if len(args) >= 1:
            key = args[0]
        rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if keywords.has_key("BADARMOR"):
        rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if keywords.has_key("NODATA"):
        rejects.append("no signature found in %s." % (sig_filename))
    if keywords.has_key("EXPKEYSIG"):
        args = keywords["EXPKEYSIG"]
        key = None
        if len(args) >= 1:
            key = args[0]
        rejects.append("Signature made by expired key 0x%s" % (key))
    if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
        args = keywords["KEYEXPIRED"]
        expiredate = ""
        if len(args) >= 1:
            timestamp = args[0]
            if timestamp.count("T") == 0:
                # Plain epoch timestamp: render it as a date if parseable.
                try:
                    expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
                except ValueError:
                    expiredate = "unknown (%s)" % (timestamp)
            else:
                # Already an ISO8601-style string; use it verbatim.
                expiredate = timestamp
        rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))

    if len(rejects) > 0:
        return (None, rejects)

    # Next check gpgv exited with a zero return code
    if exit_status:
        rejects.append("gpgv failed while checking %s." % (sig_filename))
        # BUGFIX (as above): drop the stray second argument to append().
        if status.strip():
            rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "))
        else:
            rejects.append(prefix_multi_line_string(output, " [GPG output:] "))
        return (None, rejects)

    # Sanity check the good stuff we expect
    fingerprint = None
    if not keywords.has_key("VALIDSIG"):
        rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
    else:
        args = keywords["VALIDSIG"]
        if len(args) < 1:
            rejects.append("internal error while checking signature on %s." % (sig_filename))
        else:
            fingerprint = args[0]
    if not keywords.has_key("GOODSIG"):
        rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if not keywords.has_key("SIG_ID"):
        rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

    # Finally ensure there's not something we don't recognise
    known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
                          NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")

    for keyword in keywords.keys():
        if not known_keywords.has_key(keyword):
            rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))

    if len(rejects) > 0:
        return (None, rejects)

    return (fingerprint, [])
1382 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """Retrieve email addresses from gpg key uids for a given fingerprint."""
    # Serve repeated lookups for the same key straight from the cache.
    cached = key_uid_email_cache.get(fingerprint)
    if cached != None:
        return cached
    addresses = set()
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
          % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    if result == 0:
        for uid_line in output.split('\n'):
            match = re_gpg_uid.match(uid_line)
            if match:
                addresses.add(match.group(1))
    key_uid_email_cache[fingerprint] = addresses
    return addresses
1401 ################################################################################
1403 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """
    Word-wrap 'paragraph' so each line is at most 'max_length' characters.

    Continuation lines are preceded by 'prefix'.  A single word longer
    than 'max_length' is emitted on a line of its own rather than broken.

    Returns the wrapped text as a single string.
    """
    # Accumulate completed pieces in a list and join once at the end,
    # instead of quadratic repeated string concatenation.
    chunks = []
    line = ""              # the line currently being filled
    have_started = False   # True while 'line' holds unflushed words

    for word in paragraph.split():
        word_size = len(word)
        if word_size > max_length:
            # Oversized word: flush any pending line, then put the word
            # on a line of its own.
            if have_started:
                chunks.append(line + '\n' + prefix)
            chunks.append(word + '\n' + prefix)
            line = ""
            have_started = False
        elif have_started:
            # +1 accounts for the joining space.
            if len(line) + word_size + 1 > max_length:
                chunks.append(line + '\n' + prefix)
                line = word
            else:
                line += ' ' + word
        else:
            line = word
            have_started = True

    if have_started:
        chunks.append(line)

    return "".join(chunks)
1434 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
    Returns fixed 'src'.
    """
    # Strip the leading 'root' component from both paths (first match only).
    rel_src = src.replace(root, '', 1)
    rel_dest_dir = os.path.dirname(dest.replace(root, '', 1))
    # One "../" hop for every path component of the destination directory.
    hops = len(rel_dest_dir.split('/'))
    return '../' * hops + rel_src
1447 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.

    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pair (fd, name).
    """
    (tfd, tname) = tempfile.mkstemp(suffix, prefix, directory)
    return (tfd, tname)
1461 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Return a secure and unique directory by pre-creating it.

    If 'parent' is non-null, it will be the directory the directory is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pathname to the new directory.
    """
    created = tempfile.mkdtemp(suffix, prefix, parent)
    return created
1475 ################################################################################
def is_email_alias(email):
    """ checks if the user part of the email is listed in the alias file """
    global alias_cache
    if alias_cache is None:
        aliasfn = which_alias_file()
        alias_cache = set()
        if aliasfn:
            # Each alias-file line is "alias: target"; only the alias
            # name before the colon matters for membership testing.
            # BUGFIX: close the file handle when done instead of leaking
            # it (the old code never closed the open() result).
            f = open(aliasfn)
            try:
                for l in f:
                    alias_cache.add(l.split(':')[0])
            finally:
                f.close()
    uid = email.split('@')[0]
    return uid in alias_cache
1489 ################################################################################
def get_changes_files(dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place).

    Returns a list of filenames.
    """
    try:
        # Much of the rest of p-u/p-a depends on being in the right place
        os.chdir(dir)
        changes_files = [entry for entry in os.listdir(dir) if entry.endswith('.changes')]
    except OSError as e:
        fubar("Failed to read list from directory %s (%s)" % (dir, e))

    return changes_files
1508 ################################################################################
# Module-level initialisation: build the global apt-style configuration
# object ('Cnf') used throughout this module, seeded from the default dak
# config file.
Cnf = apt_pkg.newConfiguration()
apt_pkg.ReadConfigFileISC(Cnf,default_config)

# which_conf_file() may select a host-specific config; only overlay it
# when it differs from the default we already loaded above.
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())