2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
40 import email as modemail
43 from dbconn import DBConn, get_architecture, get_component, get_suite
44 from dak_exceptions import *
45 from textutils import fix_maintainer
46 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
47 re_multi_line_field, re_srchasver, re_taint_free, \
48 re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource
50 from formats import parse_format, validate_changes_format
51 from srcformats import get_format_from_string
52 from collections import defaultdict
54 ################################################################################
56 default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
57 default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used
59 alias_cache = None #: Cache for email alias checks
60 key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
62 # (hashname, function, earliest_changes_version)
63 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
64 ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
66 # Monkeypatch commands.getstatusoutput as it returns a "0" exit code in
67 # all situations under lenny's Python.
69 def dak_getstatusoutput(cmd):
70 pipe = subprocess.Popen(cmd, shell=True, universal_newlines=True,
71 stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
73 output = "".join(pipe.stdout.readlines())
75 if output[-1:] == '\n':
83 commands.getstatusoutput = dak_getstatusoutput
85 ################################################################################
88 """ Escape html chars """
89 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
91 ################################################################################
93 def open_file(filename, mode='r'):
95 Open C{file}, return fileobject.
97 @type filename: string
98 @param filename: path/filename to open
101 @param mode: open mode
104 @return: open fileobject
106 @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
110 f = open(filename, mode)
112 raise CantOpenError, filename
115 ################################################################################
117 def our_raw_input(prompt=""):
119 sys.stdout.write(prompt)
125 sys.stderr.write("\nUser interrupt (^D).\n")
128 ################################################################################
130 def extract_component_from_section(section):
133 if section.find('/') != -1:
134 component = section.split('/')[0]
136 # Expand default component
138 if Cnf.has_key("Component::%s" % section):
143 return (section, component)
145 ################################################################################
147 def parse_deb822(contents, signing_rules=0):
151 # Split the lines in the input, keeping the linebreaks.
152 lines = contents.splitlines(True)
155 raise ParseChangesError, "[Empty changes file]"
157 # Reindex by line number so we can easily verify the format of
163 indexed_lines[index] = line[:-1]
167 num_of_lines = len(indexed_lines.keys())
170 while index < num_of_lines:
172 line = indexed_lines[index]
174 if signing_rules == 1:
176 if index > num_of_lines:
177 raise InvalidDscError, index
178 line = indexed_lines[index]
179 if not line.startswith("-----BEGIN PGP SIGNATURE"):
180 raise InvalidDscError, index
185 if line.startswith("-----BEGIN PGP SIGNATURE"):
187 if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
189 if signing_rules == 1:
190 while index < num_of_lines and line != "":
192 line = indexed_lines[index]
194 # If we're not inside the signed data, don't process anything
195 if signing_rules >= 0 and not inside_signature:
197 slf = re_single_line_field.match(line)
199 field = slf.groups()[0].lower()
200 changes[field] = slf.groups()[1]
204 changes[field] += '\n'
206 mlf = re_multi_line_field.match(line)
209 raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
210 if first == 1 and changes[field] != "":
211 changes[field] += '\n'
213 changes[field] += mlf.groups()[0] + '\n'
217 if signing_rules == 1 and inside_signature:
218 raise InvalidDscError, index
220 changes["filecontents"] = "".join(lines)
222 if changes.has_key("source"):
223 # Strip the source version in brackets from the source field,
224 # put it in the "source-version" field instead.
225 srcver = re_srchasver.search(changes["source"])
227 changes["source"] = srcver.group(1)
228 changes["source-version"] = srcver.group(2)
231 raise ParseChangesError, error
235 ################################################################################
237 def parse_changes(filename, signing_rules=0):
239 Parses a changes file and returns a dictionary where each field is a
240 key. The mandatory first argument is the filename of the .changes
243 signing_rules is an optional argument:
245 - If signing_rules == -1, no signature is required.
246 - If signing_rules == 0 (the default), a signature is required.
247 - If signing_rules == 1, it turns on the same strict format checking
250 The rules for (signing_rules == 1)-mode are:
252 - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
253 followed by any PGP header data and must end with a blank line.
255 - The data section must end with a blank line and must be followed by
256 "-----BEGIN PGP SIGNATURE-----".
259 changes_in = open_file(filename)
260 content = changes_in.read()
263 unicode(content, 'utf-8')
265 raise ChangesUnicodeError, "Changes file not proper utf-8"
266 return parse_deb822(content, signing_rules)
268 ################################################################################
def hash_key(hashname):
    """Return the files-dict key under which the given hash value is stored."""
    return hashname + 'sum'
273 ################################################################################
275 def create_hash(where, files, hashname, hashfunc):
277 create_hash extends the passed files dict with the given hash by
278 iterating over all files on disk and passing them to the hashing
283 for f in files.keys():
285 file_handle = open_file(f)
286 except CantOpenError:
287 rejmsg.append("Could not open file %s for checksumming" % (f))
290 files[f][hash_key(hashname)] = hashfunc(file_handle)
295 ################################################################################
297 def check_hash(where, files, hashname, hashfunc):
299 check_hash checks the given hash in the files dict against the actual
300 files on disk. The hash values need to be present consistently in
301 all file entries. It does not modify its input in any way.
305 for f in files.keys():
309 file_handle = open_file(f)
311 # Check for the hash entry, to not trigger a KeyError.
312 if not files[f].has_key(hash_key(hashname)):
313 rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
317 # Actually check the hash for correctness.
318 if hashfunc(file_handle) != files[f][hash_key(hashname)]:
319 rejmsg.append("%s: %s check failed in %s" % (f, hashname,
321 except CantOpenError:
322 # TODO: This happens when the file is in the pool.
323 # warn("Cannot open file %s" % f)
330 ################################################################################
332 def check_size(where, files):
334 check_size checks the file sizes in the passed files dict against the
339 for f in files.keys():
344 # TODO: This happens when the file is in the pool.
348 actual_size = entry[stat.ST_SIZE]
349 size = int(files[f]["size"])
350 if size != actual_size:
351 rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
352 % (f, actual_size, size, where))
355 ################################################################################
357 def check_dsc_files(dsc_filename, dsc=None, dsc_files=None):
359 Verify that the files listed in the Files field of the .dsc are
360 those expected given the announced Format.
362 @type dsc_filename: string
363 @param dsc_filename: path of .dsc file
366 @param dsc: the content of the .dsc parsed by C{parse_changes()}
368 @type dsc_files: dict
369 @param dsc_files: the file list returned by C{build_file_list()}
372 @return: all errors detected
376 # Parse the file if needed
378 dsc = parse_changes(dsc_filename, signing_rules=1);
380 if dsc_files is None:
381 dsc_files = build_file_list(dsc, is_a_dsc=1)
383 # Ensure .dsc lists proper set of source files according to the format
385 has = defaultdict(lambda: 0)
388 (r'orig.tar.gz', ('orig_tar_gz', 'orig_tar')),
389 (r'diff.gz', ('debian_diff',)),
390 (r'tar.gz', ('native_tar_gz', 'native_tar')),
391 (r'debian\.tar\.(gz|bz2)', ('debian_tar',)),
392 (r'orig\.tar\.(gz|bz2)', ('orig_tar',)),
393 (r'tar\.(gz|bz2)', ('native_tar',)),
394 (r'orig-.+\.tar\.(gz|bz2)', ('more_orig_tar',)),
397 for f in dsc_files.keys():
398 m = re_issource.match(f)
400 rejmsg.append("%s: %s in Files field not recognised as source."
404 # Populate 'has' dictionary by resolving keys in lookup table
406 for regex, keys in ftype_lookup:
407 if re.match(regex, m.group(3)):
413 # File does not match anything in lookup table; reject
415 reject("%s: unexpected source file '%s'" % (dsc_filename, f))
417 # Check for multiple files
418 for file_type in ('orig_tar', 'native_tar', 'debian_tar', 'debian_diff'):
419 if has[file_type] > 1:
420 rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))
422 # Source format specific tests
424 format = get_format_from_string(dsc['format'])
426 '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)
429 except UnknownFormatError:
430 # Not an error here for now
435 ################################################################################
437 def check_hash_fields(what, manifest):
439 check_hash_fields ensures that there are no checksum fields in the
440 given dict that we do not know about.
444 hashes = map(lambda x: x[0], known_hashes)
445 for field in manifest:
446 if field.startswith("checksums-"):
447 hashname = field.split("-",1)[1]
448 if hashname not in hashes:
449 rejmsg.append("Unsupported checksum field for %s "\
450 "in %s" % (hashname, what))
453 ################################################################################
455 def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
456 if format >= version:
457 # The version should contain the specified hash.
460 # Import hashes from the changes
461 rejmsg = parse_checksums(".changes", files, changes, hashname)
465 # We need to calculate the hash because it can't possibly
468 return func(".changes", files, hashname, hashfunc)
470 # We could add the orig which might be in the pool to the files dict to
471 # access the checksums easily.
473 def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
475 ensure_dsc_hashes' task is to ensure that each and every *present* hash
476 in the dsc is correct, i.e. identical to the changes file and if necessary
477 the pool. The latter task is delegated to check_hash.
481 if not dsc.has_key('Checksums-%s' % (hashname,)):
483 # Import hashes from the dsc
484 parse_checksums(".dsc", dsc_files, dsc, hashname)
486 rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
489 ################################################################################
491 def parse_checksums(where, files, manifest, hashname):
493 field = 'checksums-%s' % hashname
494 if not field in manifest:
496 for line in manifest[field].split('\n'):
499 clist = line.strip().split(' ')
501 checksum, size, checkfile = clist
503 rejmsg.append("Cannot parse checksum line [%s]" % (line))
505 if not files.has_key(checkfile):
506 # TODO: check for the file's entry in the original files dict, not
507 # the one modified by (auto)byhand and other weird stuff
508 # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
509 # (file, hashname, where))
511 if not files[checkfile]["size"] == size:
512 rejmsg.append("%s: size differs for files and checksums-%s entry "\
513 "in %s" % (checkfile, hashname, where))
515 files[checkfile][hash_key(hashname)] = checksum
516 for f in files.keys():
517 if not files[f].has_key(hash_key(hashname)):
518 rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
522 ################################################################################
524 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
526 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
529 # Make sure we have a Files: field to parse...
530 if not changes.has_key(field):
531 raise NoFilesFieldError
533 # Validate .changes Format: field
535 validate_changes_format(parse_format(changes['format']), field)
537 includes_section = (not is_a_dsc) and field == "files"
539 # Parse each entry/line:
540 for i in changes[field].split('\n'):
544 section = priority = ""
547 (md5, size, section, priority, name) = s
549 (md5, size, name) = s
551 raise ParseChangesError, i
558 (section, component) = extract_component_from_section(section)
560 files[name] = Dict(size=size, section=section,
561 priority=priority, component=component)
562 files[name][hashname] = md5
566 ################################################################################
568 def send_mail (message, filename=""):
569 """sendmail wrapper, takes _either_ a message string or a file as arguments"""
571 # If we've been passed a string dump it into a temporary file
573 (fd, filename) = tempfile.mkstemp()
574 os.write (fd, message)
577 if Cnf.has_key("Dinstall::MailWhiteList") and \
578 Cnf["Dinstall::MailWhiteList"] != "":
579 message_in = open_file(filename)
580 message_raw = modemail.message_from_file(message_in)
584 whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
586 for line in whitelist_in:
587 if not re_whitespace_comment.match(line):
588 if re_re_mark.match(line):
589 whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
591 whitelist.append(re.compile(re.escape(line.strip())))
596 fields = ["To", "Bcc", "Cc"]
599 value = message_raw.get(field, None)
602 for item in value.split(","):
603 (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
609 if not mail_whitelisted:
610 print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])
614 # Doesn't have any mail in whitelist so remove the header
616 del message_raw[field]
618 message_raw.replace_header(field, string.join(match, ", "))
620 # Change message fields in order if we don't have a To header
621 if not message_raw.has_key("To"):
624 if message_raw.has_key(field):
625 message_raw[fields[-1]] = message_raw[field]
626 del message_raw[field]
629 # Clean up any temporary files
630 # and return, as we removed all recipients.
632 os.unlink (filename);
635 fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
636 os.write (fd, message_raw.as_string(True));
640 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
642 raise SendmailFailedError, output
644 # Clean up any temporary files
648 ################################################################################
650 def poolify (source, component):
653 if source[:3] == "lib":
654 return component + source[:4] + '/' + source + '/'
656 return component + source[:1] + '/' + source + '/'
658 ################################################################################
660 def move (src, dest, overwrite = 0, perms = 0664):
661 if os.path.exists(dest) and os.path.isdir(dest):
664 dest_dir = os.path.dirname(dest)
665 if not os.path.exists(dest_dir):
666 umask = os.umask(00000)
667 os.makedirs(dest_dir, 02775)
669 #print "Moving %s to %s..." % (src, dest)
670 if os.path.exists(dest) and os.path.isdir(dest):
671 dest += '/' + os.path.basename(src)
672 # Don't overwrite unless forced to
673 if os.path.exists(dest):
675 fubar("Can't move %s to %s - file already exists." % (src, dest))
677 if not os.access(dest, os.W_OK):
678 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
679 shutil.copy2(src, dest)
680 os.chmod(dest, perms)
683 def copy (src, dest, overwrite = 0, perms = 0664):
684 if os.path.exists(dest) and os.path.isdir(dest):
687 dest_dir = os.path.dirname(dest)
688 if not os.path.exists(dest_dir):
689 umask = os.umask(00000)
690 os.makedirs(dest_dir, 02775)
692 #print "Copying %s to %s..." % (src, dest)
693 if os.path.exists(dest) and os.path.isdir(dest):
694 dest += '/' + os.path.basename(src)
695 # Don't overwrite unless forced to
696 if os.path.exists(dest):
698 raise FileExistsError
700 if not os.access(dest, os.W_OK):
701 raise CantOverwriteError
702 shutil.copy2(src, dest)
703 os.chmod(dest, perms)
705 ################################################################################
708 res = socket.gethostbyaddr(socket.gethostname())
709 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
710 if database_hostname:
711 return database_hostname
715 def which_conf_file ():
716 if os.getenv("DAK_CONFIG"):
717 print(os.getenv("DAK_CONFIG"))
718 return os.getenv("DAK_CONFIG")
720 res = socket.gethostbyaddr(socket.gethostname())
721 # In case we allow local config files per user, try if one exists
722 if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
723 homedir = os.getenv("HOME")
724 confpath = os.path.join(homedir, "/etc/dak.conf")
725 if os.path.exists(confpath):
726 apt_pkg.ReadConfigFileISC(Cnf,default_config)
728 # We are still in here, so there is no local config file or we do
729 # not allow local files. Do the normal stuff.
730 if Cnf.get("Config::" + res[0] + "::DakConfig"):
731 return Cnf["Config::" + res[0] + "::DakConfig"]
733 return default_config
735 def which_apt_conf_file ():
736 res = socket.gethostbyaddr(socket.gethostname())
737 # In case we allow local config files per user, try if one exists
738 if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
739 homedir = os.getenv("HOME")
740 confpath = os.path.join(homedir, "/etc/dak.conf")
741 if os.path.exists(confpath):
742 apt_pkg.ReadConfigFileISC(Cnf,default_config)
744 if Cnf.get("Config::" + res[0] + "::AptConfig"):
745 return Cnf["Config::" + res[0] + "::AptConfig"]
747 return default_apt_config
749 def which_alias_file():
750 hostname = socket.gethostbyaddr(socket.gethostname())[0]
751 aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
752 if os.path.exists(aliasfn):
757 ################################################################################
759 def TemplateSubst(map, filename):
760 """ Perform a substition of template """
761 templatefile = open_file(filename)
762 template = templatefile.read()
764 template = template.replace(x, str(map[x]))
768 ################################################################################
770 def fubar(msg, exit_code=1):
771 sys.stderr.write("E: %s\n" % (msg))
775 sys.stderr.write("W: %s\n" % (msg))
777 ################################################################################
779 # Returns the user name with a laughable attempt at rfc822 conformancy
780 # (read: removing stray periods).
782 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
785 return pwd.getpwuid(os.getuid())[0]
787 ################################################################################
797 return ("%d%s" % (c, t))
799 ################################################################################
801 def cc_fix_changes (changes):
802 o = changes.get("architecture", "")
804 del changes["architecture"]
805 changes["architecture"] = {}
807 changes["architecture"][j] = 1
809 def changes_compare (a, b):
810 """ Sort by source name, source version, 'have source', and then by filename """
812 a_changes = parse_changes(a)
817 b_changes = parse_changes(b)
821 cc_fix_changes (a_changes)
822 cc_fix_changes (b_changes)
824 # Sort by source name
825 a_source = a_changes.get("source")
826 b_source = b_changes.get("source")
827 q = cmp (a_source, b_source)
831 # Sort by source version
832 a_version = a_changes.get("version", "0")
833 b_version = b_changes.get("version", "0")
834 q = apt_pkg.VersionCompare(a_version, b_version)
838 # Sort by 'have source'
839 a_has_source = a_changes["architecture"].get("source")
840 b_has_source = b_changes["architecture"].get("source")
841 if a_has_source and not b_has_source:
843 elif b_has_source and not a_has_source:
846 # Fall back to sort by filename
849 ################################################################################
851 def find_next_free (dest, too_many=100):
854 while os.path.exists(dest) and extra < too_many:
855 dest = orig_dest + '.' + repr(extra)
857 if extra >= too_many:
858 raise NoFreeFilenameError
861 ################################################################################
863 def result_join (original, sep = '\t'):
865 for i in xrange(len(original)):
866 if original[i] == None:
867 resultlist.append("")
869 resultlist.append(original[i])
870 return sep.join(resultlist)
872 ################################################################################
874 def prefix_multi_line_string(str, prefix, include_blank_lines=0):
876 for line in str.split('\n'):
878 if line or include_blank_lines:
879 out += "%s%s\n" % (prefix, line)
880 # Strip trailing new line
885 ################################################################################
887 def validate_changes_file_arg(filename, require_changes=1):
889 'filename' is either a .changes or .dak file. If 'filename' is a
890 .dak file, it's changed to be the corresponding .changes file. The
891 function then checks if the .changes file a) exists and b) is
892 readable and returns the .changes filename if so. If there's a
893 problem, the next action depends on the option 'require_changes'
896 - If 'require_changes' == -1, errors are ignored and the .changes
897 filename is returned.
898 - If 'require_changes' == 0, a warning is given and 'None' is returned.
899 - If 'require_changes' == 1, a fatal error is raised.
904 orig_filename = filename
905 if filename.endswith(".dak"):
906 filename = filename[:-4]+".changes"
908 if not filename.endswith(".changes"):
909 error = "invalid file type; not a changes file"
911 if not os.access(filename,os.R_OK):
912 if os.path.exists(filename):
913 error = "permission denied"
915 error = "file not found"
918 if require_changes == 1:
919 fubar("%s: %s." % (orig_filename, error))
920 elif require_changes == 0:
921 warn("Skipping %s - %s" % (orig_filename, error))
923 else: # We only care about the .dak file
928 ################################################################################
931 return (arch != "source" and arch != "all")
933 ################################################################################
def join_with_commas_and(list):
    """Join a list into an English enumeration, e.g. "a, b and c".

    An empty list yields the string "nothing"; a single element is
    returned unchanged.
    """
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    return "%s and %s" % (", ".join(list[:-1]), list[-1])
940 ################################################################################
945 (pkg, version, constraint) = atom
947 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
950 pp_deps.append(pp_dep)
951 return " |".join(pp_deps)
953 ################################################################################
958 ################################################################################
960 def parse_args(Options):
961 """ Handle -a, -c and -s arguments; returns them as SQL constraints """
962 # XXX: This should go away and everything which calls it be converted
963 # to use SQLA properly. For now, we'll just fix it not to use
964 # the old Pg interface though
965 session = DBConn().session()
969 for suitename in split_args(Options["Suite"]):
970 suite = get_suite(suitename, session=session)
971 if suite.suite_id is None:
972 warn("suite '%s' not recognised." % (suite.suite_name))
974 suite_ids_list.append(suite.suite_id)
976 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
978 fubar("No valid suite given.")
983 if Options["Component"]:
984 component_ids_list = []
985 for componentname in split_args(Options["Component"]):
986 component = get_component(componentname, session=session)
987 if component is None:
988 warn("component '%s' not recognised." % (componentname))
990 component_ids_list.append(component.component_id)
991 if component_ids_list:
992 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
994 fubar("No valid component given.")
998 # Process architecture
999 con_architectures = ""
1001 if Options["Architecture"]:
1003 for archname in split_args(Options["Architecture"]):
1004 if archname == "source":
1007 arch = get_architecture(archname, session=session)
1009 warn("architecture '%s' not recognised." % (archname))
1011 arch_ids_list.append(arch.arch_id)
1013 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
1015 if not check_source:
1016 fubar("No valid architecture given.")
1020 return (con_suites, con_architectures, con_components, check_source)
1022 ################################################################################
1024 # Inspired(tm) by Bryn Keller's print_exc_plus (See
1025 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
1028 tb = sys.exc_info()[2]
1035 frame = frame.f_back
1037 traceback.print_exc()
1039 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
1040 frame.f_code.co_filename,
1042 for key, value in frame.f_locals.items():
1043 print "\t%20s = " % key,
1047 print "<unable to print>"
1049 ################################################################################
1051 def try_with_debug(function):
1059 ################################################################################
1061 def arch_compare_sw (a, b):
1063 Function for use in sorting lists of architectures.
1065 Sorts normally except that 'source' dominates all others.
1068 if a == "source" and b == "source":
1077 ################################################################################
1079 def split_args (s, dwim=1):
1081 Split command line arguments which can be separated by either commas
1082 or whitespace. If dwim is set, it will complain about string ending
1083 in comma since this usually means someone did 'dak ls -a i386, m68k
1084 foo' or something and the inevitable confusion resulting from 'm68k'
1085 being treated as an argument is undesirable.
1088 if s.find(",") == -1:
1091 if s[-1:] == "," and dwim:
1092 fubar("split_args: found trailing comma, spurious space maybe?")
1095 ################################################################################
def Dict(**kwargs):
    """Return the given keyword arguments as a plain dictionary.

    Legacy convenience helper (callers write Dict(a=1, b=2) instead of a
    dict literal).  The receiving name is 'kwargs' rather than 'dict' so
    the builtin 'dict' type is not shadowed inside the function.
    """
    return kwargs
1099 ########################################
1101 def gpgv_get_status_output(cmd, status_read, status_write):
1103 Our very own version of commands.getouputstatus(), hacked to support
1107 cmd = ['/bin/sh', '-c', cmd]
1108 p2cread, p2cwrite = os.pipe()
1109 c2pread, c2pwrite = os.pipe()
1110 errout, errin = os.pipe()
1120 for i in range(3, 256):
1121 if i != status_write:
1127 os.execvp(cmd[0], cmd)
1133 os.dup2(c2pread, c2pwrite)
1134 os.dup2(errout, errin)
1136 output = status = ""
1138 i, o, e = select.select([c2pwrite, errin, status_read], [], [])
1141 r = os.read(fd, 8196)
1143 more_data.append(fd)
1144 if fd == c2pwrite or fd == errin:
1146 elif fd == status_read:
1149 fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
1151 pid, exit_status = os.waitpid(pid, 0)
1153 os.close(status_write)
1154 os.close(status_read)
1164 return output, status, exit_status
1166 ################################################################################
1168 def process_gpgv_output(status):
1169 # Process the status-fd output
1172 for line in status.split('\n'):
1176 split = line.split()
1178 internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
1180 (gnupg, keyword) = split[:2]
1181 if gnupg != "[GNUPG:]":
1182 internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
1185 if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
1186 internal_error += "found duplicate status token ('%s').\n" % (keyword)
1189 keywords[keyword] = args
1191 return (keywords, internal_error)
1193 ################################################################################
1195 def retrieve_key (filename, keyserver=None, keyring=None):
1197 Retrieve the key that signed 'filename' from 'keyserver' and
1198 add it to 'keyring'. Returns nothing on success, or an error message
1202 # Defaults for keyserver and keyring
1204 keyserver = Cnf["Dinstall::KeyServer"]
1206 keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
1208 # Ensure the filename contains no shell meta-characters or other badness
1209 if not re_taint_free.match(filename):
1210 return "%s: tainted filename" % (filename)
1212 # Invoke gpgv on the file
1213 status_read, status_write = os.pipe()
1214 cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1215 (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1217 # Process the status-fd output
1218 (keywords, internal_error) = process_gpgv_output(status)
1220 return internal_error
1222 if not keywords.has_key("NO_PUBKEY"):
1223 return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1225 fingerprint = keywords["NO_PUBKEY"][0]
1226 # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
1227 # it'll try to create a lockfile in /dev. A better solution might
1228 # be a tempfile or something.
1229 cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1230 % (Cnf["Dinstall::SigningKeyring"])
1231 cmd += " --keyring %s --keyserver %s --recv-key %s" \
1232 % (keyring, keyserver, fingerprint)
1233 (result, output) = commands.getstatusoutput(cmd)
1235 return "'%s' failed with exit code %s" % (cmd, result)
1239 ################################################################################
1241 def gpg_keyring_args(keyrings=None):
1243 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1245 return " ".join(["--keyring %s" % x for x in keyrings])
1247 ################################################################################
def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None):
    """
    Check the signature of a file and return the fingerprint if the
    signature is valid or 'None' if it's not.  The first argument is the
    filename whose signature should be checked.  The second argument is
    optional and is the name of the file the detached signature applies
    to.  The third argument is optional and is a *list* of keyrings to
    use.  'autofetch' can either be None, True or False.  If None, the
    default behaviour specified in the config will be used.

    @rtype: tuple
    @return: (fingerprint, rejects) -- 'fingerprint' is the signing key's
        fingerprint or None on failure; 'rejects' is a list of
        human-readable rejection messages (empty on success).
    """

    rejects = []

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(sig_filename):
        rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
        return (None, rejects)

    if data_filename and not re_taint_free.match(data_filename):
        rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
        return (None, rejects)

    if not keyrings:
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    # Autofetch the signing key if that's enabled
    if autofetch is None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    if autofetch:
        error_msg = retrieve_key(sig_filename)
        if error_msg:
            rejects.append(error_msg)
            return (None, rejects)

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    if internal_error:
        rejects.append("internal error while performing signature check on %s." % (sig_filename))
        # BUGFIX: list.append() takes exactly one argument; the old code
        # passed a second "prefix" argument left over from the reject()
        # callback API and raised a TypeError here.
        rejects.append(internal_error)
        rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.")
        return (None, rejects)

    # Now check for obviously bad things in the processed output
    if "KEYREVOKED" in keywords:
        rejects.append("The key used to sign %s has been revoked." % (sig_filename))
    if "BADSIG" in keywords:
        rejects.append("bad signature on %s." % (sig_filename))
    if "ERRSIG" in keywords and "NO_PUBKEY" not in keywords:
        rejects.append("failed to check signature on %s." % (sig_filename))
    if "NO_PUBKEY" in keywords:
        args = keywords["NO_PUBKEY"]
        # Default so a malformed (argument-less) status line cannot
        # trigger a NameError below.
        key = "UNKNOWN"
        if len(args) >= 1:
            key = args[0]
        rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if "BADARMOR" in keywords:
        rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if "NODATA" in keywords:
        rejects.append("no signature found in %s." % (sig_filename))
    if "EXPKEYSIG" in keywords:
        args = keywords["EXPKEYSIG"]
        key = "UNKNOWN"
        if len(args) >= 1:
            key = args[0]
        rejects.append("Signature made by expired key 0x%s" % (key))
    if "KEYEXPIRED" in keywords and "GOODSIG" not in keywords:
        args = keywords["KEYEXPIRED"]
        expiredate = ""
        if len(args) >= 1:
            timestamp = args[0]
            # gpgv emits either an epoch timestamp or an ISO-8601 date
            # (which contains a 'T'); render epochs as YYYY-MM-DD.
            if timestamp.count("T") == 0:
                try:
                    expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
                except ValueError:
                    expiredate = "unknown (%s)" % (timestamp)
            else:
                expiredate = timestamp
        rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))

    if len(rejects) > 0:
        return (None, rejects)

    # Next check gpgv exited with a zero return code
    if exit_status:
        rejects.append("gpgv failed while checking %s." % (sig_filename))
        # Prefer the machine-readable status-fd output; fall back to the
        # human-readable output when status is empty.
        if status.strip():
            rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "))
        else:
            rejects.append(prefix_multi_line_string(output, " [GPG output:] "))
        return (None, rejects)

    # Sanity check the good stuff we expect
    if "VALIDSIG" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
    else:
        args = keywords["VALIDSIG"]
        if len(args) < 1:
            rejects.append("internal error while checking signature on %s." % (sig_filename))
        else:
            fingerprint = args[0]
    if "GOODSIG" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if "SIG_ID" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

    # Finally ensure there's not something we don't recognise
    known_keywords = dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
                          NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")

    for keyword in keywords:
        if keyword not in known_keywords:
            rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))

    if len(rejects) > 0:
        return (None, rejects)

    return (fingerprint, [])
1381 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """Retrieve email addresses from gpg key uids for a given fingerprint."""
    # Results are memoized per fingerprint in the module-level cache so
    # repeated lookups do not fork gpg again.
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses is not None:
        return addresses
    addresses = set()
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
          % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    if result == 0:
        for l in output.split('\n'):
            m = re_gpg_uid.match(l)
            if m is not None:
                addresses.add(m.group(1))
    # Cache even an empty set, so failed lookups are not retried.
    key_uid_email_cache[fingerprint] = addresses
    return addresses
1400 ################################################################################
1402 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """
    Greedily wrap 'paragraph' so that no line exceeds 'max_length'
    characters; every line after the first is indented with 'prefix'.

    Words longer than 'max_length' are placed on a line of their own
    rather than being broken up.

    Returns the wrapped string (no trailing newline).
    """
    s = ""
    line = ""
    have_started = 0
    words = paragraph.split()

    for word in words:
        word_size = len(word)
        if word_size > max_length:
            # Over-long word: flush any pending line, then emit the word
            # on a line of its own.  Reset 'line' so the pending text is
            # not emitted twice and the next word starts a fresh line.
            if have_started:
                s += line + '\n' + prefix
            s += word + '\n' + prefix
            line = ""
            have_started = 1
        elif have_started:
            if not line:
                # First word after an over-long word starts a new line
                # (avoids a spurious leading space).
                line = word
            elif len(line) + word_size + 1 > max_length:
                # Word does not fit: flush the current line first.
                s += line + '\n' + prefix
                line = word
            else:
                line += ' ' + word
        else:
            line = word
            have_started = 1

    if have_started:
        s += line

    return s
1433 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.

    Returns the adjusted 'src' path.
    """
    # Strip the root prefix (first occurrence only) from both paths.
    rel_src = src.replace(root, '', 1)
    link_dir = os.path.dirname(dest.replace(root, '', 1))
    # One '../' for every path component of the directory holding 'dest'.
    ups = '../' * len(link_dir.split('/'))
    return ups + rel_src
1446 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.

    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pair (fd, name).
    """
    # Delegate to tempfile; spelling the arguments out by keyword avoids
    # the easy-to-misread (suffix, prefix, dir) positional order.
    return tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=directory)
1460 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Return a secure and unique directory by pre-creating it.

    If 'parent' is non-null, it will be the directory the directory is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pathname to the new directory.
    """
    # Delegate to tempfile; keyword arguments make the (suffix, prefix,
    # dir) positional order explicit.
    return tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=parent)
1474 ################################################################################
def is_email_alias(email):
    """ checks if the user part of the email is listed in the alias file """
    global alias_cache
    if alias_cache is None:
        aliasfn = which_alias_file()
        alias_cache = set()
        if aliasfn:
            # Alias file lines are "name: target"; we only need the name.
            # BUGFIX: close the file handle instead of leaking it.
            f = open(aliasfn)
            try:
                for l in f:
                    alias_cache.add(l.split(':')[0])
            finally:
                f.close()
    uid = email.split('@')[0]
    return uid in alias_cache
1488 ################################################################################
def get_changes_files(dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place).

    Returns a list of filenames.
    """
    try:
        # Much of the rest of p-u/p-a depends on being in the right place
        os.chdir(dir)
        changes_files = [x for x in os.listdir(dir) if x.endswith('.changes')]
    except OSError as e:
        # fubar() prints the error and exits the program.
        fubar("Failed to read list from directory %s (%s)" % (dir, e))

    return changes_files
1507 ################################################################################
# Module-level initialisation: build the global apt_pkg configuration
# object and load the default dak config file into it.
Cnf = apt_pkg.newConfiguration()
apt_pkg.ReadConfigFileISC(Cnf,default_config)
# If a non-default config file is in effect, layer it on top of the
# defaults.  NOTE(review): which_conf_file() presumably honours an
# environment/option override -- confirm against its definition earlier
# in this file.
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())