2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
40 import email as modemail
43 from dbconn import DBConn, get_architecture, get_component, get_suite
44 from dak_exceptions import *
45 from textutils import fix_maintainer
46 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
47 re_multi_line_field, re_srchasver, re_taint_free, \
48 re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource, \
51 from formats import parse_format, validate_changes_format
52 from srcformats import get_format_from_string
53 from collections import defaultdict
55 ################################################################################
# Built-in configuration paths; host-specific settings live in dak.conf.
default_config = "/etc/dak/dak.conf"     #: default dak config, defines host properties
default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used

alias_cache = None        #: Cache for email alias checks
key_uid_email_cache = {}  #: Cache for email addresses from gpg key uids

# (hashname, function, earliest_changes_version)
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
                ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
# Monkeypatch commands.getstatusoutput as it returns a "0" exit code in
# all situations under lenny's Python.

def dak_getstatusoutput(cmd):
    """Run cmd through the shell, capturing combined stdout+stderr.

    Replacement for commands.getstatusoutput() (installed below) that
    reports real exit codes.
    """
    pipe = subprocess.Popen(cmd, shell=True, universal_newlines=True,
        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

    output = "".join(pipe.stdout.readlines())

# Install the replacement so all existing callers pick it up.
commands.getstatusoutput = dak_getstatusoutput
83 ################################################################################
    """Escape HTML-special characters in s using the html_escaping map."""
    return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
89 ################################################################################
def open_file(filename, mode='r'):
    """
    Open C{file}, return fileobject.

    @type filename: string
    @param filename: path/filename to open

    @type mode: string
    @param mode: open mode

    @return: open fileobject

    @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.

    """
        f = open(filename, mode)
        # Translate the low-level IOError into dak's own exception type.
        raise CantOpenError, filename
113 ################################################################################
def our_raw_input(prompt=""):
    # Prompt on stdout; an EOF (^D) from the user is reported on stderr.
    sys.stdout.write(prompt)
        sys.stderr.write("\nUser interrupt (^D).\n")
126 ################################################################################
def extract_component_from_section(section):
    # Split a "component/section" value into its parts; a bare section
    # implies the default component.
    if section.find('/') != -1:
        component = section.split('/')[0]

    # Expand default component
        if Cnf.has_key("Component::%s" % section):

    return (section, component)
143 ################################################################################
def parse_deb822(contents, signing_rules=0):
    """Parse a deb822-style text block (.changes/.dsc body) into a field dict.

    signing_rules follows the same convention as parse_changes() below.
    """
    # Split the lines in the input, keeping the linebreaks.
    lines = contents.splitlines(True)
        raise ParseChangesError, "[Empty changes file]"

    # Reindex by line number so we can easily verify the format of
        indexed_lines[index] = line[:-1]

    num_of_lines = len(indexed_lines.keys())
    while index < num_of_lines:
        line = indexed_lines[index]
        if signing_rules == 1:
            if index > num_of_lines:
                raise InvalidDscError, index
            line = indexed_lines[index]
            # Strict mode: the signed data must be followed immediately
            # by the PGP signature block.
            if not line.startswith("-----BEGIN PGP SIGNATURE"):
                raise InvalidDscError, index
        if line.startswith("-----BEGIN PGP SIGNATURE"):
        if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
            if signing_rules == 1:
                # Skip the PGP header lines up to the first blank line.
                while index < num_of_lines and line != "":
                    line = indexed_lines[index]
        # If we're not inside the signed data, don't process anything
        if signing_rules >= 0 and not inside_signature:
        slf = re_single_line_field.match(line)
            field = slf.groups()[0].lower()
            changes[field] = slf.groups()[1]
            changes[field] += '\n'
        mlf = re_multi_line_field.match(line)
                raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
            if first == 1 and changes[field] != "":
                changes[field] += '\n'
            changes[field] += mlf.groups()[0] + '\n'

    if signing_rules == 1 and inside_signature:
        raise InvalidDscError, index
    # Keep the verbatim input around as well as the parsed fields.
    changes["filecontents"] = "".join(lines)

    if changes.has_key("source"):
        # Strip the source version in brackets from the source field,
        # put it in the "source-version" field instead.
        srcver = re_srchasver.search(changes["source"])
            changes["source"] = srcver.group(1)
            changes["source-version"] = srcver.group(2)

        raise ParseChangesError, error
233 ################################################################################
def parse_changes(filename, signing_rules=0):
    """
    Parses a changes file and returns a dictionary where each field is a
    key. The mandatory first argument is the filename of the .changes
    signing_rules is an optional argument:

    - If signing_rules == -1, no signature is required.
    - If signing_rules == 0 (the default), a signature is required.
    - If signing_rules == 1, it turns on the same strict format checking

    The rules for (signing_rules == 1)-mode are:

    - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
      followed by any PGP header data and must end with a blank line.

    - The data section must end with a blank line and must be followed by
      "-----BEGIN PGP SIGNATURE-----".
    """
    changes_in = open_file(filename)
    content = changes_in.read()
    # Reject files that are not valid UTF-8 before handing them to the
    # deb822 parser.
        unicode(content, 'utf-8')
        raise ChangesUnicodeError, "Changes file not proper utf-8"
    return parse_deb822(content, signing_rules)
266 ################################################################################
def hash_key(hashname):
    """Return the files-dict key used for the given hash, e.g. "sha1" -> "sha1sum"."""
    return hashname + 'sum'
271 ################################################################################
def create_hash(where, files, hashname, hashfunc):
    """
    create_hash extends the passed files dict with the given hash by
    iterating over all files on disk and passing them to the hashing
    function given.
    """
    for f in files.keys():
            file_handle = open_file(f)
        except CantOpenError:
            rejmsg.append("Could not open file %s for checksumming" % (f))

        # Store the computed digest under e.g. "sha1sum" (see hash_key()).
        files[f][hash_key(hashname)] = hashfunc(file_handle)
293 ################################################################################
def check_hash(where, files, hashname, hashfunc):
    """
    check_hash checks the given hash in the files dict against the actual
    files on disk. The hash values need to be present consistently in
    all file entries. It does not modify its input in any way.
    """
    for f in files.keys():
                file_handle = open_file(f)

                # Check for the hash entry, to not trigger a KeyError.
                if not files[f].has_key(hash_key(hashname)):
                    rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,

                # Actually check the hash for correctness.
                if hashfunc(file_handle) != files[f][hash_key(hashname)]:
                    rejmsg.append("%s: %s check failed in %s" % (f, hashname,
            except CantOpenError:
                # TODO: This happens when the file is in the pool.
                # warn("Cannot open file %s" % f)
328 ################################################################################
def check_size(where, files):
    """
    check_size checks the file sizes in the passed files dict against the
    files on disk.
    """
    for f in files.keys():
                # TODO: This happens when the file is in the pool.

        actual_size = entry[stat.ST_SIZE]
        size = int(files[f]["size"])
        if size != actual_size:
            rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
                   % (f, actual_size, size, where))
353 ################################################################################
def check_dsc_files(dsc_filename, dsc=None, dsc_files=None):
    """
    Verify that the files listed in the Files field of the .dsc are
    those expected given the announced Format.

    @type dsc_filename: string
    @param dsc_filename: path of .dsc file

    @param dsc: the content of the .dsc parsed by C{parse_changes()}

    @type dsc_files: dict
    @param dsc_files: the file list returned by C{build_file_list()}

    @return: all errors detected
    """
    # Parse the file if needed
        dsc = parse_changes(dsc_filename, signing_rules=1);

    if dsc_files is None:
        dsc_files = build_file_list(dsc, is_a_dsc=1)

    # Ensure .dsc lists proper set of source files according to the format
    has = defaultdict(lambda: 0)

        # (filename-suffix regex, counters to bump when it matches)
        (r'orig.tar.gz',               ('orig_tar_gz', 'orig_tar')),
        (r'diff.gz',                   ('debian_diff',)),
        (r'tar.gz',                    ('native_tar_gz', 'native_tar')),
        (r'debian\.tar\.(gz|bz2)',     ('debian_tar',)),
        (r'orig\.tar\.(gz|bz2)',       ('orig_tar',)),
        (r'tar\.(gz|bz2)',             ('native_tar',)),
        (r'orig-.+\.tar\.(gz|bz2)',    ('more_orig_tar',)),

    for f in dsc_files.keys():
        m = re_issource.match(f)
            rejmsg.append("%s: %s in Files field not recognised as source."

        # Populate 'has' dictionary by resolving keys in lookup table
        for regex, keys in ftype_lookup:
            if re.match(regex, m.group(3)):

        # File does not match anything in lookup table; reject
            # NOTE(review): 'reject' is not defined in this module;
            # rejmsg.append() was probably intended — confirm.
            reject("%s: unexpected source file '%s'" % (dsc_filename, f))

    # Check for multiple files
    for file_type in ('orig_tar', 'native_tar', 'debian_tar', 'debian_diff'):
        if has[file_type] > 1:
            rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))

    # Source format specific tests
        format = get_format_from_string(dsc['format'])
            '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)

    except UnknownFormatError:
        # Not an error here for now
433 ################################################################################
def check_hash_fields(what, manifest):
    """
    check_hash_fields ensures that there are no checksum fields in the
    given dict that we do not know about.
    """
    # known_hashes holds (hashname, function, version) tuples; keep the names.
    hashes = map(lambda x: x[0], known_hashes)
    for field in manifest:
        if field.startswith("checksums-"):
            hashname = field.split("-",1)[1]
            if hashname not in hashes:
                rejmsg.append("Unsupported checksum field for %s "\
                    "in %s" % (hashname, what))
451 ################################################################################
def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
    # Ensure the given checksum type is available for every file in the
    # .changes: parsed from the Checksums-* field when the format is new
    # enough, computed from disk otherwise.
    if format >= version:
        # The version should contain the specified hash.

        # Import hashes from the changes
        rejmsg = parse_checksums(".changes", files, changes, hashname)
        # We need to calculate the hash because it can't possibly
        return func(".changes", files, hashname, hashfunc)
# We could add the orig which might be in the pool to the files dict to
# access the checksums easily.

def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
    """
    ensure_dsc_hashes' task is to ensure that each and every *present* hash
    in the dsc is correct, i.e. identical to the changes file and if necessary
    the pool. The latter task is delegated to check_hash.
    """
    # Nothing to verify if the .dsc does not carry this checksum field.
    if not dsc.has_key('Checksums-%s' % (hashname,)):
    # Import hashes from the dsc
    parse_checksums(".dsc", dsc_files, dsc, hashname)
    rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
487 ################################################################################
def parse_checksums(where, files, manifest, hashname):
    # Parse a "Checksums-<hash>" field ("checksum size filename" lines)
    # out of manifest and record each checksum in the files dict.
    field = 'checksums-%s' % hashname
    if not field in manifest:
    for line in manifest[field].split('\n'):
        clist = line.strip().split(' ')
            checksum, size, checkfile = clist
            rejmsg.append("Cannot parse checksum line [%s]" % (line))
        if not files.has_key(checkfile):
        # TODO: check for the file's entry in the original files dict, not
        # the one modified by (auto)byhand and other weird stuff
        #             rejmsg.append("%s: not present in files but in checksums-%s in %s" %
        #                 (file, hashname, where))
        # Sizes are compared as strings here; both come from parsed text.
        if not files[checkfile]["size"] == size:
            rejmsg.append("%s: size differs for files and checksums-%s entry "\
                "in %s" % (checkfile, hashname, where))
        files[checkfile][hash_key(hashname)] = checksum
    for f in files.keys():
        if not files[f].has_key(hash_key(hashname)):
            # NOTE(review): this message interpolates 'checkfile' (the last
            # line parsed above) rather than 'f', the file actually missing
            # the entry — looks like a bug; confirm.
            rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
                hashname, where))
520 ################################################################################
# Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl

def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
    # Turn a parsed Files/Checksums-* field into a dict keyed by filename.
    # Make sure we have a Files: field to parse...
    if not changes.has_key(field):
        raise NoFilesFieldError

    # Validate .changes Format: field
        validate_changes_format(parse_format(changes['format']), field)

    # .changes "files" entries carry section/priority; .dsc entries do not.
    includes_section = (not is_a_dsc) and field == "files"

    # Parse each entry/line:
    for i in changes[field].split('\n'):
        section = priority = ""
                (md5, size, section, priority, name) = s
                (md5, size, name) = s
            raise ParseChangesError, i

        (section, component) = extract_component_from_section(section)

        files[name] = Dict(size=size, section=section,
                           priority=priority, component=component)
        files[name][hashname] = md5
564 ################################################################################
def send_mail (message, filename=""):
    """sendmail wrapper, takes _either_ a message string or a file as arguments"""

    # If we've been passed a string dump it into a temporary file
        (fd, filename) = tempfile.mkstemp()
        os.write (fd, message)

    # When a whitelist is configured, strip non-whitelisted recipients
    # from To/Bcc/Cc before handing the mail to sendmail.
    if Cnf.has_key("Dinstall::MailWhiteList") and \
           Cnf["Dinstall::MailWhiteList"] != "":
        message_in = open_file(filename)
        message_raw = modemail.message_from_file(message_in)

        whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
            for line in whitelist_in:
                if not re_whitespace_comment.match(line):
                    if re_re_mark.match(line):
                        # Lines marked as regexes are compiled as-is ...
                        whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
                        # ... everything else is matched literally.
                        whitelist.append(re.compile(re.escape(line.strip())))

        # Fields to check.
        fields = ["To", "Bcc", "Cc"]
            # Check each field
            value = message_raw.get(field, None)
                for item in value.split(","):
                    (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
                    if not mail_whitelisted:
                        print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])

                # Doesn't have any mail in whitelist so remove the header
                    del message_raw[field]
                    message_raw.replace_header(field, string.join(match, ", "))

        # Change message fields in order if we don't have a To header
        if not message_raw.has_key("To"):
                if message_raw.has_key(field):
                    message_raw[fields[-1]] = message_raw[field]
                    del message_raw[field]

                # Clean up any temporary files
                # and return, as we removed all recipients.
                    os.unlink (filename);

        fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
        os.write (fd, message_raw.as_string(True));

    # Invoke sendmail
    (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
        raise SendmailFailedError, output

    # Clean up any temporary files
646 ################################################################################
def poolify (source, component):
    # Pool subdirectory for a source package: "lib*" packages key on their
    # first four characters, everything else on the first character.
    if source[:3] == "lib":
        return component + source[:4] + '/' + source + '/'
    return component + source[:1] + '/' + source + '/'
656 ################################################################################
def move (src, dest, overwrite = 0, perms = 0664):
    # Move src to dest, creating the destination directory (setgid,
    # group-writable) if needed; refuses to clobber unless overwrite is set.
    if os.path.exists(dest) and os.path.isdir(dest):
    dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Moving %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
            fubar("Can't move %s to %s - file already exists." % (src, dest))
            if not os.access(dest, os.W_OK):
                fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
    # copy2 preserves mtime; chmod applies the requested permissions after.
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
def copy (src, dest, overwrite = 0, perms = 0664):
    # Same as move() above, but keeps src and raises instead of calling
    # fubar() on conflicts.
    if os.path.exists(dest) and os.path.isdir(dest):
    dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Copying %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
            raise FileExistsError
            if not os.access(dest, os.W_OK):
                raise CantOverwriteError
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
703 ################################################################################
    # Look up this host's configured DatabaseHostname, if any (the def
    # line of this helper is outside this view).
    res = socket.gethostbyaddr(socket.gethostname())
    database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
    if database_hostname:
        return database_hostname
def which_conf_file ():
    # Pick the dak config file: $DAK_CONFIG wins, then per-host local
    # config handling, then the host's configured DakConfig, then the
    # built-in default.
    if os.getenv("DAK_CONFIG"):
        print(os.getenv("DAK_CONFIG"))
        return os.getenv("DAK_CONFIG")
    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # NOTE(review): os.path.join() discards homedir when the second
        # component is absolute, so confpath is always "/etc/dak.conf";
        # a per-user path under $HOME was probably intended — confirm.
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            apt_pkg.ReadConfigFileISC(Cnf,default_config)

    # We are still in here, so there is no local config file or we do
    # not allow local files. Do the normal stuff.
    if Cnf.get("Config::" + res[0] + "::DakConfig"):
        return Cnf["Config::" + res[0] + "::DakConfig"]

    return default_config
def which_apt_conf_file ():
    """Return the apt configuration file path to use on this host.

    When the host allows local configs, a per-user config at
    $HOME/etc/dak.conf is loaded first if present.  The host's configured
    AptConfig path is returned when set, falling back to the built-in
    default_apt_config.
    """
    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # Bug fix: the second component must be relative — os.path.join()
        # discards homedir entirely when given the absolute "/etc/dak.conf".
        confpath = os.path.join(homedir, "etc/dak.conf")
        if os.path.exists(confpath):
            apt_pkg.ReadConfigFileISC(Cnf,default_config)

    if Cnf.get("Config::" + res[0] + "::AptConfig"):
        return Cnf["Config::" + res[0] + "::AptConfig"]

    return default_apt_config
def which_alias_file():
    # Per-host forward-alias file under /var/lib/misc, returned only when
    # it actually exists.
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
    if os.path.exists(aliasfn):
755 ################################################################################
def TemplateSubst(map, filename):
    """ Perform a substition of template """
    templatefile = open_file(filename)
    template = templatefile.read()
        # Replace each placeholder key from map with its value.
        template = template.replace(x, str(map[x]))
766 ################################################################################
def fubar(msg, exit_code=1):
    # Fatal error: report on stderr (exit handled on an elided line).
    sys.stderr.write("E: %s\n" % (msg))

    # Body of warn(): non-fatal message on stderr.
    sys.stderr.write("W: %s\n" % (msg))
775 ################################################################################
# Returns the user name with a laughable attempt at rfc822 conformancy
# (read: removing stray periods).
    return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')

    # Invoking user's login name (separate helper; def line not in view).
    return pwd.getpwuid(os.getuid())[0]
785 ################################################################################
    # Human-readable size: integer count plus unit suffix (presumably the
    # tail of a size-formatting helper — confirm against full source).
    return ("%d%s" % (c, t))
797 ################################################################################
def cc_fix_changes (changes):
    # Canonicalise the Architecture field into a dict keyed by arch name.
    o = changes.get("architecture", "")
        del changes["architecture"]
    changes["architecture"] = {}
        changes["architecture"][j] = 1
def changes_compare (a, b):
    """ Sort by source name, source version, 'have source', and then by filename """
        a_changes = parse_changes(a)
        b_changes = parse_changes(b)

    cc_fix_changes (a_changes)
    cc_fix_changes (b_changes)

    # Sort by source name
    a_source = a_changes.get("source")
    b_source = b_changes.get("source")
    q = cmp (a_source, b_source)

    # Sort by source version
    a_version = a_changes.get("version", "0")
    b_version = b_changes.get("version", "0")
    q = apt_pkg.VersionCompare(a_version, b_version)

    # Sort by 'have source'
    a_has_source = a_changes["architecture"].get("source")
    b_has_source = b_changes["architecture"].get("source")
    if a_has_source and not b_has_source:
    elif b_has_source and not a_has_source:

    # Fall back to sort by filename
847 ################################################################################
def find_next_free (dest, too_many=100):
    # Append ".<n>" suffixes until an unused filename is found; give up
    # after too_many attempts.
    while os.path.exists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
    if extra >= too_many:
        raise NoFreeFilenameError
859 ################################################################################
def result_join (original, sep = '\t'):
    # Join a result row with sep, mapping None entries to "".
    for i in xrange(len(original)):
        if original[i] == None:
            resultlist.append("")
            resultlist.append(original[i])
    return sep.join(resultlist)
870 ################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    # Prepend prefix to every line of str; blank lines are dropped unless
    # include_blank_lines is set.
    for line in str.split('\n'):
        if line or include_blank_lines:
            out += "%s%s\n" % (prefix, line)
    # Strip trailing new line
883 ################################################################################
def validate_changes_file_arg(filename, require_changes=1):
    """
    'filename' is either a .changes or .dak file. If 'filename' is a
    .dak file, it's changed to be the corresponding .changes file. The
    function then checks if the .changes file a) exists and b) is
    readable and returns the .changes filename if so. If there's a
    problem, the next action depends on the option 'require_changes'

    - If 'require_changes' == -1, errors are ignored and the .changes
      filename is returned.
    - If 'require_changes' == 0, a warning is given and 'None' is returned.
    - If 'require_changes' == 1, a fatal error is raised.
    """
    orig_filename = filename
    if filename.endswith(".dak"):
        filename = filename[:-4]+".changes"

    if not filename.endswith(".changes"):
        error = "invalid file type; not a changes file"
        if not os.access(filename,os.R_OK):
            if os.path.exists(filename):
                error = "permission denied"
                error = "file not found"

        if require_changes == 1:
            fubar("%s: %s." % (orig_filename, error))
        elif require_changes == 0:
            warn("Skipping %s - %s" % (orig_filename, error))
        else: # We only care about the .dak file
926 ################################################################################
    # True for a real binary architecture: neither "source" nor "all".
    return (arch != "source" and arch != "all")
931 ################################################################################
def join_with_commas_and(list):
    """Render a list as English prose: "nothing", "a", "a and b", "a, b and c"."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    return "%s and %s" % (", ".join(list[:-1]), list[-1])
938 ################################################################################
    # Pretty-print dependency atoms; the version qualifier is included
    # only when a constraint is present (def line not in view).
    (pkg, version, constraint) = atom
        pp_dep = "%s (%s %s)" % (pkg, constraint, version)
    pp_deps.append(pp_dep)
    return " |".join(pp_deps)
951 ################################################################################
956 ################################################################################
def parse_args(Options):
    """ Handle -a, -c and -s arguments; returns them as SQL constraints """
    # XXX: This should go away and everything which calls it be converted
    # to use SQLA properly. For now, we'll just fix it not to use
    # the old Pg interface though
    session = DBConn().session()

        # Process suite: collect ids, warn on unknowns.
        for suitename in split_args(Options["Suite"]):
            suite = get_suite(suitename, session=session)
            if suite.suite_id is None:
                warn("suite '%s' not recognised." % (suite.suite_name))
                suite_ids_list.append(suite.suite_id)
            con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
            fubar("No valid suite given.")

    # Process component the same way.
    if Options["Component"]:
        component_ids_list = []
        for componentname in split_args(Options["Component"]):
            component = get_component(componentname, session=session)
            if component is None:
                warn("component '%s' not recognised." % (componentname))
                component_ids_list.append(component.component_id)
        if component_ids_list:
            con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
            fubar("No valid component given.")

    # Process architecture
    con_architectures = ""
    if Options["Architecture"]:
        for archname in split_args(Options["Architecture"]):
            # "source" is handled separately via the check_source flag.
            if archname == "source":
                arch = get_architecture(archname, session=session)
                    warn("architecture '%s' not recognised." % (archname))
                    arch_ids_list.append(arch.arch_id)
            con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
            if not check_source:
                fubar("No valid architecture given.")

    return (con_suites, con_architectures, con_components, check_source)
1020 ################################################################################
# Inspired(tm) by Bryn Keller's print_exc_plus (See
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)

    # Walk to the innermost frame of the current traceback, then dump each
    # frame's locals for post-mortem debugging (def line not in view).
    tb = sys.exc_info()[2]
        frame = frame.f_back
    traceback.print_exc()
        print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
                                               frame.f_code.co_filename,
        for key, value in frame.f_locals.items():
            print "\t%20s = " % key,
                # repr() of some locals can itself raise; swallow that.
                print "<unable to print>"
1047 ################################################################################
# Run function, presumably dumping an extended backtrace on failure
# (body not shown here — confirm against full source).
def try_with_debug(function):
1057 ################################################################################
def arch_compare_sw (a, b):
    """
    Function for use in sorting lists of architectures.

    Sorts normally except that 'source' dominates all others.
    """
    if a == "source" and b == "source":
def split_args (s, dwim=1):
    """
    Split command line arguments which can be separated by either commas
    or whitespace. If dwim is set, it will complain about string ending
    in comma since this usually means someone did 'dak ls -a i386, m68k
    foo' or something and the inevitable confusion resulting from 'm68k'
    being treated as an argument is undesirable.
    """
    # No comma at all: fall back to whitespace splitting.
    if s.find(",") == -1:
        if s[-1:] == "," and dwim:
            fubar("split_args: found trailing comma, spurious space maybe?")
1093 ################################################################################
def Dict(**kwargs):
    """Return the keyword arguments as a plain dictionary.

    Convenience constructor: Dict(a=1, b=2) == {'a': 1, 'b': 2}.  The
    **-parameter is named kwargs instead of the original ``dict`` so it no
    longer shadows the builtin; callers are unaffected since a **-parameter
    name is never visible to them.
    """
    return kwargs
1097 ########################################
def gpgv_get_status_output(cmd, status_read, status_write):
    """
    Our very own version of commands.getouputstatus(), hacked to support
    gpgv's status fd.
    """
    cmd = ['/bin/sh', '-c', cmd]
    p2cread, p2cwrite = os.pipe()
    c2pread, c2pwrite = os.pipe()
    errout, errin = os.pipe()
        # Child: close every inherited fd except the gpgv status pipe.
        for i in range(3, 256):
            if i != status_write:
            os.execvp(cmd[0], cmd)

    # Parent: redirect the child's stdout/stderr pipe ends.
    os.dup2(c2pread, c2pwrite)
    os.dup2(errout, errin)

    output = status = ""
        # Multiplex reads over stdout/stderr and the status fd.
        i, o, e = select.select([c2pwrite, errin, status_read], [], [])
            r = os.read(fd, 8196)
                more_data.append(fd)
                if fd == c2pwrite or fd == errin:
                elif fd == status_read:
                    fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
            pid, exit_status = os.waitpid(pid, 0)
                os.close(status_write)
                os.close(status_read)
    return output, status, exit_status
1164 ################################################################################
def process_gpgv_output(status):
    # Process the status-fd output
    # Parse "[GNUPG:] KEYWORD args..." lines into a keyword -> args dict,
    # accumulating any malformed lines into internal_error.
    for line in status.split('\n'):
        split = line.split()
            internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
        # These keywords may legitimately repeat; all others may not.
        if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
            keywords[keyword] = args

    return (keywords, internal_error)
1191 ################################################################################
def retrieve_key (filename, keyserver=None, keyring=None):
    """
    Retrieve the key that signed 'filename' from 'keyserver' and
    add it to 'keyring'. Returns nothing on success, or an error message
    on failure.
    """
    # Defaults for keyserver and keyring
        keyserver = Cnf["Dinstall::KeyServer"]
        keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(filename):
        return "%s: tainted filename" % (filename)

    # Invoke gpgv on the file
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
    (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)
        return internal_error

    # An empty keyring means gpgv must report NO_PUBKEY for a signed file;
    # that record carries the fingerprint we need to fetch.
    if not keywords.has_key("NO_PUBKEY"):
        return "didn't find expected NO_PUBKEY in gpgv status-fd output"

    fingerprint = keywords["NO_PUBKEY"][0]
    # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
    # it'll try to create a lockfile in /dev. A better solution might
    # be a tempfile or something.
    cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
          % (Cnf["Dinstall::SigningKeyring"])
    cmd += " --keyring %s --keyserver %s --recv-key %s" \
           % (keyring, keyserver, fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
        return "'%s' failed with exit code %s" % (cmd, result)
1237 ################################################################################
def gpg_keyring_args(keyrings=None):
    # Default to the configured keyrings; render as gpg --keyring options.
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    return " ".join(["--keyring %s" % x for x in keyrings])
1245 ################################################################################
1247 def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None):
# NOTE(review): the inner line numbering (1247, 1249, 1265, ...) has gaps, so
# this listing is missing lines (docstring quotes, the 'rejects = []'
# initialisation, and several 'if' guards). Comments below describe only the
# visible logic — restore the full body from VCS before changing behaviour.
1249 Check the signature of a file and return the fingerprint if the
1250 signature is valid or 'None' if it's not. The first argument is the
1251 filename whose signature should be checked. The second argument is a
1252 reject function and is called when an error is found. The reject()
1253 function must allow for two arguments: the first is the error message,
1254 the second is an optional prefix string. It's possible for reject()
1255 to be called more than once during an invocation of check_signature().
1256 The third argument is optional and is the name of the files the
1257 detached signature applies to. The fourth argument is optional and is
1258 a *list* of keyrings to use. 'autofetch' can either be None, True or
1259 False. If None, the default behaviour specified in the config will be
# NOTE(review): the prose above looks stale — the visible code accumulates
# error strings in a 'rejects' list and returns (fingerprint_or_None,
# rejects); no reject() callback is invoked anywhere in this body.
# Both filenames are later interpolated into a shell command line for gpgv,
# so refuse anything containing shell metacharacters up front.
1265 # Ensure the filename contains no shell meta-characters or other badness
1266 if not re_taint_free.match(sig_filename):
1267 rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
1268 return (None, rejects)
1270 if data_filename and not re_taint_free.match(data_filename):
1271 rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
1272 return (None, rejects)
# Presumably guarded by an elided 'if not keyrings:' — falls back to the
# configured keyring list when the caller supplied none. TODO confirm.
1275 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1277 # Autofetch the signing key if that's enabled
1278 if autofetch == None:
1279 autofetch = Cnf.get("Dinstall::KeyAutoFetch")
1281 error_msg = retrieve_key(sig_filename)
1283 rejects.append(error_msg)
1284 return (None, rejects)
# gpgv writes machine-readable status lines to the fd passed via
# --status-fd; a pipe captures them separately from stdout/stderr.
1286 # Build the command line
1287 status_read, status_write = os.pipe()
1288 cmd = "gpgv --status-fd %s %s %s %s" % (
1289 status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)
1291 # Invoke gpgv on the file
1292 (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)
1294 # Process the status-fd output
1295 (keywords, internal_error) = process_gpgv_output(status)
1297 # If we failed to parse the status-fd output, let's just whine and bail now
1299 rejects.append("internal error while performing signature check on %s." % (sig_filename))
# BUG(review): list.append() takes exactly one argument; the two-argument
# calls below would raise TypeError if this path is ever reached. They look
# like leftovers from the old reject(msg, prefix) callback API.
1300 rejects.append(internal_error, "")
1301 rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.", "")
1302 return (None, rejects)
# Each gpgv status token maps to a human-readable rejection message; more
# than one can apply to a single signature.
1304 # Now check for obviously bad things in the processed output
1305 if keywords.has_key("KEYREVOKED"):
1306 rejects.append("The key used to sign %s has been revoked." % (sig_filename))
1307 if keywords.has_key("BADSIG"):
1308 rejects.append("bad signature on %s." % (sig_filename))
1309 if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
1310 rejects.append("failed to check signature on %s." % (sig_filename))
1311 if keywords.has_key("NO_PUBKEY"):
1312 args = keywords["NO_PUBKEY"]
# NOTE(review): 'key' is presumably extracted from args[0] on an elided
# line (numbering gap 1313-1314) — confirm against VCS.
1315 rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
1316 if keywords.has_key("BADARMOR"):
1317 rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
1318 if keywords.has_key("NODATA"):
1319 rejects.append("no signature found in %s." % (sig_filename))
1320 if keywords.has_key("EXPKEYSIG"):
1321 args = keywords["EXPKEYSIG"]
1324 rejects.append("Signature made by expired key 0x%s" % (key))
1325 if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
1326 args = keywords["KEYEXPIRED"]
# The expiry timestamp is either epoch seconds (no "T"), formatted here as
# YYYY-MM-DD, or an ISO-8601-style value passed through verbatim; the
# surrounding try/else structure appears elided (gaps 1327-1335).
1330 if timestamp.count("T") == 0:
1332 expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
1334 expiredate = "unknown (%s)" % (timestamp)
1336 expiredate = timestamp
1337 rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))
1339 if len(rejects) > 0:
1340 return (None, rejects)
1342 # Next check gpgv exited with a zero return code
1344 rejects.append("gpgv failed while checking %s." % (sig_filename))
# On gpgv failure, include both the raw status-fd lines and gpgv's own
# output in the rejection text to aid debugging.
1346 rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
1348 rejects.append(prefix_multi_line_string(output, " [GPG output:] "), "")
1349 return (None, rejects)
1351 # Sanity check the good stuff we expect
1352 if not keywords.has_key("VALIDSIG"):
1353 rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
1355 args = keywords["VALIDSIG"]
1357 rejects.append("internal error while checking signature on %s." % (sig_filename))
# The first VALIDSIG argument is the value returned as the fingerprint on
# success.
1359 fingerprint = args[0]
1360 if not keywords.has_key("GOODSIG"):
1361 rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
1362 if not keywords.has_key("SIG_ID"):
1363 rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))
# Any status token outside this whitelist is reported rather than ignored,
# so new gpgv behaviour surfaces loudly instead of being silently accepted.
1365 # Finally ensure there's not something we don't recognise
1366 known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
1367 SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
1368 NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")
1370 for keyword in keywords.keys():
1371 if not known_keywords.has_key(keyword):
1372 rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
1374 if len(rejects) > 0:
1375 return (None, rejects)
# Success: valid, good signature with no complaints.
1377 return (fingerprint, [])
1379 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """
    Retrieve email addresses from gpg key uids for a given fingerprint.

    @type fingerprint: string
    @param fingerprint: key fingerprint to look up

    @rtype: set of strings
    @return: the email addresses found in the key's uids (empty on failure);
             results are memoized in key_uid_email_cache
    """
    # Fast path: serve repeated lookups from the module-level cache.
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses != None:
        return addresses
    addresses = set()
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
          % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    # Only parse output if gpg succeeded; a failed lookup caches an empty set.
    if result == 0:
        for l in output.split('\n'):
            m = re_gpg_uid.match(l)
            # Skip lines that are not uid lines; calling group() on a
            # failed match would raise AttributeError.
            if m != None:
                addresses.add(m.group(1))
    key_uid_email_cache[fingerprint] = addresses
    return addresses
1398 ################################################################################
1400 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """
    Word-wrap 'paragraph' to at most 'max_length' characters per line.

    @type paragraph: string
    @param paragraph: text to wrap; split on whitespace

    @type max_length: int
    @param max_length: maximum line length; a single word longer than this
                       is placed on a line of its own

    @type prefix: string
    @param prefix: string prepended to every continuation line

    @rtype: string
    @return: the wrapped text
    """
    line = ""
    s = ""
    have_started = 0
    words = paragraph.split()

    for word in words:
        word_size = len(word)
        if word_size > max_length:
            # Over-long word: flush whatever is pending, then give the
            # word a line of its own.
            if have_started:
                s += line + '\n' + prefix
            s += word + '\n' + prefix
            line = ""
        else:
            if have_started:
                new_length = len(line) + word_size + 1
                if new_length > max_length:
                    # Adding the word would overflow: flush and restart.
                    s += line + '\n' + prefix
                    line = word
                else:
                    line += ' ' + word
            else:
                line = word
        have_started = 1

    # Emit any remaining partial line.
    if line:
        s += line

    return s
1431 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
    Returns fixed 'src'
    """
    # Strip the leading root component from both paths.
    rel_src = src.replace(root, '', 1)
    rel_dest_dir = os.path.dirname(dest.replace(root, '', 1))
    # One "../" hop per path component in the destination's directory.
    depth = rel_dest_dir.count('/') + 1
    return '../' * depth + rel_src
1444 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.
    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pair (fd, name).
    """
    # Delegate to tempfile; the descriptor is returned open so callers can
    # write without a symlink race.
    fd, path = tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=directory)
    return (fd, path)
1458 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Return a secure and unique directory by pre-creating it.
    If 'parent' is non-null, it will be the directory the directory is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pathname to the new directory
    """
    # mkdtemp creates the directory with mode 0700, readable only by us.
    return tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=parent)
1472 ################################################################################
1474 def is_email_alias(email):
1475 """ checks if the user part of the email is listed in the alias file """
# NOTE(review): inner numbering gaps (1476, 1479-1480) indicate missing
# lines here — likely a 'global alias_cache' declaration, initialisation of
# the set, and a guard on aliasfn. Restore from VCS before editing.
1477 if alias_cache == None:
1478 aliasfn = which_alias_file()
# Cache the local-part (text before ':') of every alias file entry so the
# file is read at most once per process.
1481 for l in open(aliasfn):
1482 alias_cache.add(l.split(':')[0])
# Only the local part of the address (before '@') is compared.
1483 uid = email.split('@')[0]
1484 return uid in alias_cache
1486 ################################################################################
def get_changes_files(dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place)

    Returns a list of filenames

    @type dir: string
    @param dir: directory to scan (also becomes the process's cwd)
    """
    try:
        # Much of the rest of p-u/p-a depends on being in the right place
        os.chdir(dir)
        changes_files = [x for x in os.listdir(dir) if x.endswith('.changes')]
    except OSError as e:
        # fubar() prints the error and exits; no .changes list is usable
        # if the directory cannot be read.
        fubar("Failed to read list from directory %s (%s)" % (dir, e))

    return changes_files
1505 ################################################################################
# Module-level configuration bootstrap: load the default dak config into a
# fresh apt_pkg configuration object at import time.
1509 Cnf = apt_pkg.newConfiguration()
1510 apt_pkg.ReadConfigFileISC(Cnf,default_config)
# Layer the host-specific config file over the defaults when a non-default
# one is selected (see which_conf_file()); later reads override earlier ones.
1512 if which_conf_file() != default_config:
1513 apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())