2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
39 import email as modemail
42 from dbconn import DBConn, get_architecture, get_component, get_suite
43 from dak_exceptions import *
44 from textutils import fix_maintainer
45 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
46 re_multi_line_field, re_srchasver, re_taint_free, \
47 re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource
49 from formats import parse_format, validate_changes_format
50 from srcformats import get_format_from_string
51 from collections import defaultdict
53 ################################################################################
# Hard-coded fallback configuration paths; per-host overrides are resolved
# at runtime via the "Config::<hostname>::..." tree (see which_conf_file()).
default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used

alias_cache = None #: Cache for email alias checks
key_uid_email_cache = {} #: Cache for email addresses from gpg key uids

# (hashname, function, earliest_changes_version)
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
                ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
# Monkeypatch commands.getstatusoutput as it may not return the correct exit
# code in lenny's Python. This also affects commands.getoutput and
def dak_getstatusoutput(cmd):
    """Run 'cmd' through the shell and return (exit_status, output).

    Drop-in replacement for commands.getstatusoutput(): stderr is folded
    into stdout and a single trailing newline is stripped from the output.
    """
    pipe = subprocess.Popen(cmd, shell=True, universal_newlines=True,
                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

    output = "".join(pipe.stdout.readlines())

    # getstatusoutput() contract: no trailing newline on the output
    if output[-1:] == '\n':
        output = output[:-1]

    ret = pipe.wait()
    if ret is None:
        ret = 0

    return ret, output
# Install the replacement so every caller of commands.getstatusoutput (and
# the helpers built on top of it) picks up the fixed exit-code behaviour.
commands.getstatusoutput = dak_getstatusoutput
84 ################################################################################
87 """ Escape html chars """
88 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
90 ################################################################################
def open_file(filename, mode='r'):
    """
    Open C{filename}, return fileobject.

    @type filename: string
    @param filename: path/filename to open

    @type mode: string
    @param mode: open mode

    @rtype: fileobject
    @return: open fileobject

    @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
    """
    try:
        f = open(filename, mode)
    except IOError:
        # Call-style raise: equivalent to the old two-argument form and
        # also valid on Python 3.
        raise CantOpenError(filename)
    return f
114 ################################################################################
def our_raw_input(prompt=""):
    # Interactive prompt helper: write the prompt (no trailing newline) to
    # stdout and read the user's reply.
    # NOTE(review): the flush/read lines are elided in this copy; the stderr
    # write below is presumably inside an EOFError handler -- confirm.
    sys.stdout.write(prompt)
    sys.stderr.write("\nUser interrupt (^D).\n")
132 ################################################################################
def extract_component_from_section(section):
    # Split a Section field into (section, component); a section of the form
    # "component/section" names its component explicitly.
    if section.find('/') != -1:
        component = section.split('/')[0]

    # Expand default component
    # NOTE(review): the surrounding default-expansion branches are elided in
    # this copy; confirm the fallback when no "Component::<section>" exists.
    if Cnf.has_key("Component::%s" % section):

    return (section, component)
149 ################################################################################
def parse_deb822(contents, signing_rules=0):
    """Parse deb822-format control data into a dict keyed on lowercased
    field names; see parse_changes() for the meaning of signing_rules.

    NOTE(review): several guard/assignment lines are elided in this copy,
    so the visible control flow is not self-contained.
    """
    # Split the lines in the input, keeping the linebreaks.
    lines = contents.splitlines(True)
    raise ParseChangesError, "[Empty changes file]"

    # Reindex by line number so we can easily verify the format of
    # the file (e.g. .dsc signature placement) below.
    indexed_lines[index] = line[:-1]

    num_of_lines = len(indexed_lines.keys())
    while index < num_of_lines:
        line = indexed_lines[index]
        if signing_rules == 1:
            # Strict mode: the blank line ending the data section must be
            # followed immediately by the PGP signature block.
            if index > num_of_lines:
                raise InvalidDscError, index
            line = indexed_lines[index]
            if not line.startswith("-----BEGIN PGP SIGNATURE"):
                raise InvalidDscError, index
        if line.startswith("-----BEGIN PGP SIGNATURE"):
        if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
            if signing_rules == 1:
                # Skip the PGP header data up to its terminating blank line.
                while index < num_of_lines and line != "":
                    line = indexed_lines[index]
        # If we're not inside the signed data, don't process anything
        if signing_rules >= 0 and not inside_signature:
        slf = re_single_line_field.match(line)
        field = slf.groups()[0].lower()
        changes[field] = slf.groups()[1]
        changes[field] += '\n'
        mlf = re_multi_line_field.match(line)
        raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
        if first == 1 and changes[field] != "":
            changes[field] += '\n'
        changes[field] += mlf.groups()[0] + '\n'

    if signing_rules == 1 and inside_signature:
        raise InvalidDscError, index

    # Preserve the raw input alongside the parsed fields.
    changes["filecontents"] = "".join(lines)

    if changes.has_key("source"):
        # Strip the source version in brackets from the source field,
        # put it in the "source-version" field instead.
        srcver = re_srchasver.search(changes["source"])
        changes["source"] = srcver.group(1)
        changes["source-version"] = srcver.group(2)

    raise ParseChangesError, error
239 ################################################################################
def parse_changes(filename, signing_rules=0):
    """
    Parses a changes file and returns a dictionary where each field is a
    key. The mandatory first argument is the filename of the .changes
    file.

    signing_rules is an optional argument:

      - If signing_rules == -1, no signature is required.
      - If signing_rules == 0 (the default), a signature is required.
      - If signing_rules == 1, it turns on the same strict format checking
        as dpkg-source.

    The rules for (signing_rules == 1)-mode are:

      - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
        followed by any PGP header data and must end with a blank line.

      - The data section must end with a blank line and must be followed by
        "-----BEGIN PGP SIGNATURE-----".
    """
    changes_in = open_file(filename)
    content = changes_in.read()
    # NOTE(review): the unicode() probe is presumably wrapped in try/except
    # in the full source; as shown the raise would be unconditional.
    unicode(content, 'utf-8')
    raise ChangesUnicodeError, "Changes file not proper utf-8"
    return parse_deb822(content, signing_rules)
272 ################################################################################
def hash_key(hashname):
    """Return the files-dict key under which the given hash is stored
    (e.g. "sha1" -> "sha1sum")."""
    return hashname + 'sum'
277 ################################################################################
def create_hash(where, files, hashname, hashfunc):
    """
    create_hash extends the passed files dict with the given hash by
    iterating over all files on disk and passing them to the hashing
    function given.  Returns a list of reject messages.
    """
    for f in files.keys():
        # NOTE(review): the try: wrapper around this open is elided here.
        file_handle = open_file(f)
        except CantOpenError:
            rejmsg.append("Could not open file %s for checksumming" % (f))
        # Store the computed digest under e.g. "sha1sum" (see hash_key()).
        files[f][hash_key(hashname)] = hashfunc(file_handle)
299 ################################################################################
def check_hash(where, files, hashname, hashfunc):
    """
    check_hash checks the given hash in the files dict against the actual
    files on disk. The hash values need to be present consistently in
    all file entries. It does not modify its input in any way.
    """
    for f in files.keys():
        file_handle = open_file(f)

        # Check for the hash entry, to not trigger a KeyError.
        if not files[f].has_key(hash_key(hashname)):
            rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,

        # Actually check the hash for correctness.
        if hashfunc(file_handle) != files[f][hash_key(hashname)]:
            rejmsg.append("%s: %s check failed in %s" % (f, hashname,
        except CantOpenError:
            # TODO: This happens when the file is in the pool.
            # warn("Cannot open file %s" % f)
334 ################################################################################
def check_size(where, files):
    """
    check_size checks the file sizes in the passed files dict against the
    files on disk; returns a list of reject messages.
    """
    for f in files.keys():
        # TODO: This happens when the file is in the pool.
        # NOTE(review): the os.stat() call assigning 'entry' is elided in
        # this copy.
        actual_size = entry[stat.ST_SIZE]
        size = int(files[f]["size"])
        if size != actual_size:
            rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
                          % (f, actual_size, size, where))
359 ################################################################################
def check_dsc_files(dsc_filename, dsc=None, dsc_files=None):
    """
    Verify that the files listed in the Files field of the .dsc are
    those expected given the announced Format.

    @type dsc_filename: string
    @param dsc_filename: path of .dsc file

    @type dsc: dict
    @param dsc: the content of the .dsc parsed by C{parse_changes()}

    @type dsc_files: dict
    @param dsc_files: the file list returned by C{build_file_list()}

    @rtype: list
    @return: all errors detected
    """
    # Parse the file if needed
    dsc = parse_changes(dsc_filename, signing_rules=1);

    if dsc_files is None:
        dsc_files = build_file_list(dsc, is_a_dsc=1)

    # Ensure .dsc lists proper set of source files according to the format
    # 'has' counts how many of each source-artifact kind we saw.
    has = defaultdict(lambda: 0)

    # (suffix regex, counters bumped in 'has') -- first match wins.
    (r'orig.tar.gz', ('orig_tar_gz', 'orig_tar')),
    (r'diff.gz', ('debian_diff',)),
    (r'tar.gz', ('native_tar_gz', 'native_tar')),
    (r'debian\.tar\.(gz|bz2)', ('debian_tar',)),
    (r'orig\.tar\.(gz|bz2)', ('orig_tar',)),
    (r'tar\.(gz|bz2)', ('native_tar',)),
    (r'orig-.+\.tar\.(gz|bz2)', ('more_orig_tar',)),

    for f in dsc_files.keys():
        m = re_issource.match(f)
        rejmsg.append("%s: %s in Files field not recognised as source."

        # Populate 'has' dictionary by resolving keys in lookup table
        for regex, keys in ftype_lookup:
            if re.match(regex, m.group(3)):

        # File does not match anything in lookup table; reject
        # NOTE(review): 'reject' is not defined in this module's visible
        # scope -- other paths append to rejmsg; likely should be
        # rejmsg.append(...).
        reject("%s: unexpected source file '%s'" % (dsc_filename, f))

    # Check for multiple files
    for file_type in ('orig_tar', 'native_tar', 'debian_tar', 'debian_diff'):
        if has[file_type] > 1:
            rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))

    # Source format specific tests
    format = get_format_from_string(dsc['format'])
    '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)
    except UnknownFormatError:
        # Not an error here for now
def check_hash_fields(what, manifest):
    """
    check_hash_fields ensures that there are no checksum fields in the
    given dict that we do not know about.

    @param what: name of the manifest, used in reject messages
    @param manifest: dict of control fields to scan
    @return: list of reject messages (empty if all checksum fields are known)
    """
    rejmsg = []
    # List comprehension rather than map(): yields a real list on Python 3
    # too, so the repeated 'in' membership tests below stay correct.
    hashes = [x[0] for x in known_hashes]
    for field in manifest:
        if field.startswith("checksums-"):
            hashname = field.split("-",1)[1]
            if hashname not in hashes:
                rejmsg.append("Unsupported checksum field for %s "\
                    "in %s" % (hashname, what))
    return rejmsg
457 ################################################################################
def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
    # Make sure every file in the .changes carries the given hash: for
    # changes-format >= 'version' the hashes must already be present and are
    # imported; otherwise they have to be computed from disk.
    if format >= version:
        # The version should contain the specified hash.

        # Import hashes from the changes
        rejmsg = parse_checksums(".changes", files, changes, hashname)
    # We need to calculate the hash because it can't possibly
    # be in the file.
    # NOTE(review): the lines selecting 'func' (check_hash vs. create_hash)
    # are elided in this copy.
    return func(".changes", files, hashname, hashfunc)
# We could add the orig which might be in the pool to the files dict to
# access the checksums easily.

def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
    """
    ensure_dsc_hashes' task is to ensure that each and every *present* hash
    in the dsc is correct, i.e. identical to the changes file and if necessary
    the pool. The latter task is delegated to check_hash.
    """
    # Only dscs announcing a Checksums-<hash> field are checked.
    if not dsc.has_key('Checksums-%s' % (hashname,)):
        # Import hashes from the dsc
        parse_checksums(".dsc", dsc_files, dsc, hashname)
    rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
493 ################################################################################
def parse_checksums(where, files, manifest, hashname):
    # Import the "checksums-<hashname>" field of 'manifest' into 'files',
    # checking sizes agree and that every file gets an entry; returns a
    # list of reject messages.
    field = 'checksums-%s' % hashname
    if not field in manifest:
    for line in manifest[field].split('\n'):
        clist = line.strip().split(' ')
        checksum, size, checkfile = clist
        rejmsg.append("Cannot parse checksum line [%s]" % (line))
        if not files.has_key(checkfile):
            # TODO: check for the file's entry in the original files dict, not
            # the one modified by (auto)byhand and other weird stuff
            # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
            # (file, hashname, where))
        # Sizes are compared as strings: both sides come from control data.
        if not files[checkfile]["size"] == size:
            rejmsg.append("%s: size differs for files and checksums-%s entry "\
                "in %s" % (checkfile, hashname, where))
        files[checkfile][hash_key(hashname)] = checksum
    for f in files.keys():
        if not files[f].has_key(hash_key(hashname)):
            # NOTE(review): this message interpolates 'checkfile' (left over
            # from the parsing loop above) where 'f' looks intended.
            rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
526 ################################################################################
# Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl

def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
    # Turn the multi-line Files/Checksums-* field of 'changes' into a dict
    # keyed on filename, carrying size/section/priority/component + hash.
    # Make sure we have a Files: field to parse...
    if not changes.has_key(field):
        raise NoFilesFieldError

    # Validate .changes Format: field
    validate_changes_format(parse_format(changes['format']), field)

    # Only .changes "files" entries carry section/priority columns.
    includes_section = (not is_a_dsc) and field == "files"

    # Parse each entry/line:
    for i in changes[field].split('\n'):
        section = priority = ""
        (md5, size, section, priority, name) = s
        (md5, size, name) = s
        raise ParseChangesError, i

        (section, component) = extract_component_from_section(section)

        files[name] = dict(size=size, section=section,
                           priority=priority, component=component)
        files[name][hashname] = md5
570 ################################################################################
def send_mail (message, filename=""):
    """sendmail wrapper, takes _either_ a message string or a file as arguments

    NOTE(review): several branches (string-vs-file dispatch, whitelist
    matching, loop headers) are elided in this copy.
    """
    # If we've been passed a string dump it into a temporary file
    (fd, filename) = tempfile.mkstemp()
    os.write (fd, message)

    # Optionally filter the recipient headers against a whitelist.
    if Cnf.has_key("Dinstall::MailWhiteList") and \
       Cnf["Dinstall::MailWhiteList"] != "":
        message_in = open_file(filename)
        message_raw = modemail.message_from_file(message_in)

        whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
        for line in whitelist_in:
            if not re_whitespace_comment.match(line):
                if re_re_mark.match(line):
                    # Lines flagged as regexes are compiled as-is ...
                    whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
                    # ... everything else is matched literally.
                    whitelist.append(re.compile(re.escape(line.strip())))

        # Recipient headers to filter.
        fields = ["To", "Bcc", "Cc"]
        value = message_raw.get(field, None)
        for item in value.split(","):
            (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
        if not mail_whitelisted:
            print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])

        # Doesn't have any mail in whitelist so remove the header
        del message_raw[field]
        message_raw.replace_header(field, ', '.join(match))

        # Change message fields in order if we don't have a To header
        if not message_raw.has_key("To"):
            if message_raw.has_key(field):
                message_raw[fields[-1]] = message_raw[field]
                del message_raw[field]

        # Clean up any temporary files
        # and return, as we removed all recipients.
        os.unlink (filename);

        # Rewrite the (possibly filtered) message back to the temp file.
        fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
        os.write (fd, message_raw.as_string(True));

    # Invoke sendmail
    (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
    raise SendmailFailedError, output

    # Clean up any temporary files
652 ################################################################################
def poolify (source, component):
    """Return the pool subdirectory for 'source' within 'component',
    e.g. ("glibc", "main") -> "main/g/glibc/".  "lib*" packages are
    bucketed by their first four characters."""
    if component:
        component += '/'
    if source[:3] == "lib":
        return component + source[:4] + '/' + source + '/'
    else:
        return component + source[:1] + '/' + source + '/'
662 ################################################################################
def move (src, dest, overwrite = 0, perms = 0664):
    # Copy 'src' to 'dest' (creating leading directories group-writably when
    # needed), chmod it, and remove the source.  Refuses to clobber an
    # existing destination unless 'overwrite' is set.
    if os.path.exists(dest) and os.path.isdir(dest):
    dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Moving %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        # Moving into a directory: keep the source's basename.
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
        fubar("Can't move %s to %s - file already exists." % (src, dest))
        if not os.access(dest, os.W_OK):
            fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
def copy (src, dest, overwrite = 0, perms = 0664):
    # Like move(), but leaves 'src' in place and raises exceptions instead
    # of calling fubar(), so callers can handle failures.
    if os.path.exists(dest) and os.path.isdir(dest):
    dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Copying %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        # Copying into a directory: keep the source's basename.
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
        raise FileExistsError
        if not os.access(dest, os.W_OK):
            raise CantOverwriteError
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
709 ################################################################################
712 res = socket.gethostbyaddr(socket.gethostname())
713 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
714 if database_hostname:
715 return database_hostname
def which_conf_file ():
    # Resolve the dak configuration file: $DAK_CONFIG wins, then a per-host
    # "Config::<host>::DakConfig" entry, then the compiled-in default.
    if os.getenv('DAK_CONFIG'):
        return os.getenv('DAK_CONFIG')

    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # NOTE(review): os.path.join() discards 'homedir' because the second
        # argument is absolute -- this always yields "/etc/dak.conf".
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            # NOTE(review): reads 'default_config', not 'confpath' -- confirm.
            apt_pkg.ReadConfigFileISC(Cnf,default_config)

    # We are still in here, so there is no local config file or we do
    # not allow local files. Do the normal stuff.
    if Cnf.get("Config::" + res[0] + "::DakConfig"):
        return Cnf["Config::" + res[0] + "::DakConfig"]

    return default_config
def which_apt_conf_file ():
    # Same resolution scheme as which_conf_file(), but for the apt config.
    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # NOTE(review): same os.path.join()-with-absolute-path issue as in
        # which_conf_file(); 'homedir' is discarded.
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            apt_pkg.ReadConfigFileISC(Cnf,default_config)

    if Cnf.get("Config::" + res[0] + "::AptConfig"):
        return Cnf["Config::" + res[0] + "::AptConfig"]

    return default_apt_config
def which_alias_file():
    # Locate the per-host forward-alias file used for email alias checks.
    # NOTE(review): the return statements are elided in this copy.
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
    if os.path.exists(aliasfn):
760 ################################################################################
def TemplateSubst(subst_map, filename):
    """ Perform a substition of template

    Reads the template at 'filename' and replaces each occurrence of every
    subst_map key with str(value).

    @return: the substituted template text
    """
    templatefile = open_file(filename)
    template = templatefile.read()
    # .items() instead of .iteritems(): identical behaviour, also valid on
    # Python 3.
    for k, v in subst_map.items():
        template = template.replace(k, str(v))
    templatefile.close()
    return template
771 ################################################################################
def fubar(msg, exit_code=1):
    """Print 'msg' as a fatal error on stderr and exit with 'exit_code'."""
    sys.stderr.write("E: %s\n" % (msg))
    sys.exit(exit_code)

def warn(msg):
    """Print 'msg' as a warning on stderr (non-fatal)."""
    sys.stderr.write("W: %s\n" % (msg))
780 ################################################################################
782 # Returns the user name with a laughable attempt at rfc822 conformancy
783 # (read: removing stray periods).
785 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
788 return pwd.getpwuid(os.getuid())[0]
790 ################################################################################
800 return ("%d%s" % (c, t))
802 ################################################################################
def cc_fix_changes (changes):
    # Normalise the Architecture field: replace the space-separated string
    # with a dict keyed on each architecture name (value 1).
    o = changes.get("architecture", "")
    # NOTE(review): the guard/loop lines around the statements below are
    # elided in this copy.
    del changes["architecture"]
    changes["architecture"] = {}
    changes["architecture"][j] = 1
def changes_compare (a, b):
    """ Sort by source name, source version, 'have source', and then by filename """
    # cmp()-style comparator over two .changes file paths; the try/except
    # wrappers for unparsable files are elided in this copy.
    a_changes = parse_changes(a)
    b_changes = parse_changes(b)

    cc_fix_changes (a_changes)
    cc_fix_changes (b_changes)

    # Sort by source name
    a_source = a_changes.get("source")
    b_source = b_changes.get("source")
    q = cmp (a_source, b_source)

    # Sort by source version
    a_version = a_changes.get("version", "0")
    b_version = b_changes.get("version", "0")
    q = apt_pkg.VersionCompare(a_version, b_version)

    # Sort by 'have source'
    a_has_source = a_changes["architecture"].get("source")
    b_has_source = b_changes["architecture"].get("source")
    if a_has_source and not b_has_source:
    elif b_has_source and not a_has_source:

    # Fall back to sort by filename
def find_next_free (dest, too_many=100):
    """Return 'dest' itself, or 'dest'.N for the first N whose path does
    not exist yet.

    @raise NoFreeFilenameError: if dest and dest.0 .. dest.<too_many-1>
    all exist already.
    """
    extra = 0
    orig_dest = dest
    while os.path.exists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
        extra += 1
    if extra >= too_many:
        raise NoFreeFilenameError
    return dest
864 ################################################################################
def result_join (original, sep = '\t'):
    """Join the items of 'original' with 'sep', rendering None entries as
    the empty string.

    @return: the joined string
    """
    # 'is None' (not '== None') and a generator instead of the old
    # index-by-index xrange loop; behaviour is unchanged.
    return sep.join("" if item is None else item for item in original)
875 ################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    """Prefix each (stripped) line of 'str' with 'prefix'.  Blank lines are
    dropped unless include_blank_lines is set.  The result carries no
    trailing newline."""
    out = ""
    for line in str.split('\n'):
        line = line.strip()
        if line or include_blank_lines:
            out += "%s%s\n" % (prefix, line)
    # Strip trailing new line
    if out:
        out = out[:-1]
    return out
888 ################################################################################
def validate_changes_file_arg(filename, require_changes=1):
    """
    'filename' is either a .changes or .dak file. If 'filename' is a
    .dak file, it's changed to be the corresponding .changes file. The
    function then checks if the .changes file a) exists and b) is
    readable and returns the .changes filename if so. If there's a
    problem, the next action depends on the option 'require_changes'
    argument:

      - If 'require_changes' == -1, errors are ignored and the .changes
        filename is returned.
      - If 'require_changes' == 0, a warning is given and 'None' is returned.
      - If 'require_changes' == 1, a fatal error is raised.

    """
    orig_filename = filename
    if filename.endswith(".dak"):
        filename = filename[:-4]+".changes"

    if not filename.endswith(".changes"):
        error = "invalid file type; not a changes file"
    if not os.access(filename,os.R_OK):
        if os.path.exists(filename):
            error = "permission denied"
        error = "file not found"
    # Dispatch on require_changes once an error has been determined.
    if require_changes == 1:
        fubar("%s: %s." % (orig_filename, error))
    elif require_changes == 0:
        warn("Skipping %s - %s" % (orig_filename, error))
    else: # We only care about the .dak file
931 ################################################################################
934 return (arch != "source" and arch != "all")
936 ################################################################################
def join_with_commas_and(list):
    """Render a list as English prose: "a, b and c"; an empty list reads
    as "nothing"."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    head, last = list[:-1], list[-1]
    return "%s and %s" % (", ".join(head), last)
943 ################################################################################
948 (pkg, version, constraint) = atom
950 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
953 pp_deps.append(pp_dep)
954 return " |".join(pp_deps)
956 ################################################################################
961 ################################################################################
def parse_args(Options):
    """ Handle -a, -c and -s arguments; returns them as SQL constraints """
    # XXX: This should go away and everything which calls it be converted
    # to use SQLA properly. For now, we'll just fix it not to use
    # the old Pg interface though
    session = DBConn().session()
    # Process suite
    for suitename in split_args(Options["Suite"]):
        suite = get_suite(suitename, session=session)
        # NOTE(review): get_suite() may return None for unknown names --
        # attribute access here would then raise; confirm against callers.
        if suite.suite_id is None:
            warn("suite '%s' not recognised." % (suite.suite_name))
        suite_ids_list.append(suite.suite_id)
        con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
        fubar("No valid suite given.")

    # Process component
    if Options["Component"]:
        component_ids_list = []
        for componentname in split_args(Options["Component"]):
            component = get_component(componentname, session=session)
            if component is None:
                warn("component '%s' not recognised." % (componentname))
            component_ids_list.append(component.component_id)
        if component_ids_list:
            con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
            fubar("No valid component given.")

    # Process architecture
    con_architectures = ""
    if Options["Architecture"]:
        for archname in split_args(Options["Architecture"]):
            # "source" is not a database architecture; it sets check_source.
            if archname == "source":
            arch = get_architecture(archname, session=session)
            warn("architecture '%s' not recognised." % (archname))
            arch_ids_list.append(arch.arch_id)
            con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
        if not check_source:
            fubar("No valid architecture given.")

    return (con_suites, con_architectures, con_components, check_source)
1025 ################################################################################
1027 # Inspired(tm) by Bryn Keller's print_exc_plus (See
1028 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
1031 tb = sys.exc_info()[2]
1038 frame = frame.f_back
1040 traceback.print_exc()
1042 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
1043 frame.f_code.co_filename,
1045 for key, value in frame.f_locals.items():
1046 print "\t%20s = " % key,
1050 print "<unable to print>"
1052 ################################################################################
def try_with_debug(function):
    # NOTE(review): wrapper that runs 'function' with verbose traceback
    # handling (print_exc_plus-style) on failure; confirm exact behaviour
    # against the full source.
1062 ################################################################################
def arch_compare_sw (a, b):
    """
    Function for use in sorting lists of architectures.

    Sorts normally except that 'source' dominates all others.
    """
    if a == "source" and b == "source":
        return 0
    elif a == "source":
        return -1
    elif b == "source":
        return 1
    # cmp()-equivalent expression, also valid on Python 3.
    return (a > b) - (a < b)
def split_args (s, dwim=1):
    """
    Split command line arguments which can be separated by either commas
    or whitespace. If dwim is set, it will complain about string ending
    in comma since this usually means someone did 'dak ls -a i386, m68k
    foo' or something and the inevitable confusion resulting from 'm68k'
    being treated as an argument is undesirable.
    """
    if s.find(",") == -1:
        # No commas at all: plain whitespace split.
        return s.split()
    else:
        if s[-1:] == "," and dwim:
            fubar("split_args: found trailing comma, spurious space maybe?")
        return s.split(",")
1098 ################################################################################
def gpgv_get_status_output(cmd, status_read, status_write):
    """
    Our very own version of commands.getouputstatus(), hacked to support
    gpgv's status fd.  Returns (output, status, exit_status).

    NOTE(review): the fork and the select read-loop scaffolding are elided
    in this copy.
    """
    cmd = ['/bin/sh', '-c', cmd]
    p2cread, p2cwrite = os.pipe()
    c2pread, c2pwrite = os.pipe()
    errout, errin = os.pipe()
    # Child: close every inherited fd except std{in,out,err} and the
    # status fd, then exec the shell.
    for i in range(3, 256):
        if i != status_write:
    os.execvp(cmd[0], cmd)

    # Parent side: route the child's stdout/stderr pipes for reading.
    os.dup2(c2pread, c2pwrite)
    os.dup2(errout, errin)

    output = status = ""
    # Multiplex child stdout/stderr and the gpgv status fd.
    i, o, e = select.select([c2pwrite, errin, status_read], [], [])
    r = os.read(fd, 8196)
    more_data.append(fd)
    if fd == c2pwrite or fd == errin:
    elif fd == status_read:
    fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))

    pid, exit_status = os.waitpid(pid, 0)

    os.close(status_write)
    os.close(status_read)

    return output, status, exit_status
1165 ################################################################################
def process_gpgv_output(status):
    # Process the status-fd output
    # Turn gpgv --status-fd text into a {keyword: args} dict; complaints
    # about malformed lines accumulate in 'internal_error'.
    for line in status.split('\n'):
        split = line.split()
        internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
        # Some tokens may legitimately repeat; only flag other duplicates.
        if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
        keywords[keyword] = args

    return (keywords, internal_error)
1192 ################################################################################
def retrieve_key (filename, keyserver=None, keyring=None):
    """
    Retrieve the key that signed 'filename' from 'keyserver' and
    add it to 'keyring'. Returns nothing on success, or an error message
    on failure.
    """
    # Defaults for keyserver and keyring
    keyserver = Cnf["Dinstall::KeyServer"]
    keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(filename):
        return "%s: tainted filename" % (filename)

    # Invoke gpgv on the file
    status_read, status_write = os.pipe()
    # /dev/null keyring: we only want the NO_PUBKEY status, not verification.
    cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
    (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)
    return internal_error

    if not keywords.has_key("NO_PUBKEY"):
        return "didn't find expected NO_PUBKEY in gpgv status-fd output"

    fingerprint = keywords["NO_PUBKEY"][0]
    # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
    # it'll try to create a lockfile in /dev. A better solution might
    # be a tempfile or something.
    cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
          % (Cnf["Dinstall::SigningKeyring"])
    cmd += " --keyring %s --keyserver %s --recv-key %s" \
           % (keyring, keyserver, fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    return "'%s' failed with exit code %s" % (cmd, result)
1238 ################################################################################
def gpg_keyring_args(keyrings=None):
    """Return a single string of "--keyring <path>" options, one per
    keyring; defaults to the configured Dinstall::GPGKeyring list."""
    if not keyrings:
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    return " ".join(["--keyring %s" % x for x in keyrings])
1246 ################################################################################
def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None):
    """
    Check the signature of a file and return the fingerprint if the
    signature is valid or 'None' if it's not.

    @type sig_filename: string
    @param sig_filename: the file whose signature should be checked

    @type data_filename: string
    @param data_filename: optional name of the file a detached signature
       applies to

    @type keyrings: list
    @param keyrings: optional keyrings to use; defaults to the configured
       Dinstall::GPGKeyring values

    @type autofetch: None, True or False
    @param autofetch: whether to fetch an unknown signing key from the
       keyserver first; None means use the Dinstall::KeyAutoFetch config
       default

    @rtype: tuple
    @return: (fingerprint, []) on success, (None, [error messages]) on
       failure.  More than one message may accumulate per invocation.
    """

    rejects = []

    # Ensure the filenames contain no shell meta-characters or other badness
    if not re_taint_free.match(sig_filename):
        rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
        return (None, rejects)

    if data_filename and not re_taint_free.match(data_filename):
        rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
        return (None, rejects)

    if not keyrings:
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    # Autofetch the signing key if that's enabled
    if autofetch == None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    if autofetch:
        error_msg = retrieve_key(sig_filename)
        if error_msg:
            rejects.append(error_msg)
            return (None, rejects)

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    if internal_error:
        rejects.append("internal error while performing signature check on %s." % (sig_filename))
        # Bugfix: these appends previously passed a stray second argument
        # ("") to list.append(), which raises TypeError.
        rejects.append(internal_error)
        rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.")
        return (None, rejects)

    # Now check for obviously bad things in the processed output
    if "KEYREVOKED" in keywords:
        rejects.append("The key used to sign %s has been revoked." % (sig_filename))
    if "BADSIG" in keywords:
        rejects.append("bad signature on %s." % (sig_filename))
    if "ERRSIG" in keywords and "NO_PUBKEY" not in keywords:
        rejects.append("failed to check signature on %s." % (sig_filename))
    if "NO_PUBKEY" in keywords:
        args = keywords["NO_PUBKEY"]
        key = "UNKNOWN"        # guard against gpgv emitting the keyword with no args
        if len(args) >= 1:
            key = args[0]
        rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if "BADARMOR" in keywords:
        rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if "NODATA" in keywords:
        rejects.append("no signature found in %s." % (sig_filename))
    if "EXPKEYSIG" in keywords:
        args = keywords["EXPKEYSIG"]
        key = "UNKNOWN"
        if len(args) >= 1:
            key = args[0]
        rejects.append("Signature made by expired key 0x%s" % (key))
    if "KEYEXPIRED" in keywords and "GOODSIG" not in keywords:
        args = keywords["KEYEXPIRED"]
        expiredate = ""
        if len(args) >= 1:
            timestamp = args[0]
            if timestamp.count("T") == 0:
                # Looks like an epoch timestamp; render it as a date.
                try:
                    expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
                except ValueError:
                    expiredate = "unknown (%s)" % (timestamp)
            else:
                # Contains a "T", i.e. already an ISO8601-style string.
                expiredate = timestamp
        rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))

    if len(rejects) > 0:
        return (None, rejects)

    # Next check gpgv exited with a zero return code
    if exit_status:
        rejects.append("gpgv failed while checking %s." % (sig_filename))
        if status.strip():
            rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "))
        else:
            rejects.append(prefix_multi_line_string(output, " [GPG output:] "))
        return (None, rejects)

    # Sanity check the good stuff we expect
    if "VALIDSIG" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
    else:
        args = keywords["VALIDSIG"]
        if len(args) < 1:
            rejects.append("internal error while checking signature on %s." % (sig_filename))
        else:
            fingerprint = args[0]
    if "GOODSIG" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if "SIG_ID" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

    # Finally ensure there's not something we don't recognise
    known_keywords = dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
                          NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="",POLICY_URL="")

    for keyword in keywords.keys():
        if keyword not in known_keywords:
            rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))

    if len(rejects) > 0:
        return (None, rejects)

    return (fingerprint, [])
1380 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """Retrieve email addresses from gpg key uids for a given fingerprint."""
    # Serve repeated queries for the same fingerprint from the cache.
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses != None:
        return addresses
    addresses = set()
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
          % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    if result == 0:
        for l in output.split('\n'):
            m = re_gpg_uid.match(l)
            if m:
                addresses.add(m.group(1))
    # Cache even an empty/failed result so the lookup isn't retried.
    key_uid_email_cache[fingerprint] = addresses
    return addresses
1399 ################################################################################
1401 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """
    Greedily word-wrap 'paragraph' into lines of at most 'max_length'
    characters.  A word longer than 'max_length' is emitted on a line of
    its own.  Every line after the first is prefixed with 'prefix'
    (which does not count towards 'max_length').

    @rtype: string
    @return: the wrapped text
    """
    s = ""
    line = ""

    for word in paragraph.split():
        word_size = len(word)
        if word_size > max_length:
            # Overlong word: flush any pending line, then emit the word
            # on its own line.  (Bugfix: the flushed words used to stay
            # buffered in 'line' and were duplicated later in the output.)
            if line:
                s += line + '\n' + prefix
                line = ""
            s += word + '\n' + prefix
        elif not line:
            # First word on a fresh line.
            line = word
        elif len(line) + word_size + 1 > max_length:
            # Word doesn't fit; flush the current line and start a new one.
            s += line + '\n' + prefix
            line = word
        else:
            line += ' ' + word

    return s + line
1432 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.

    Both paths have the leading 'root' stripped; the returned path climbs
    out of dest's directory with one '../' per remaining path component
    before appending the stripped src.
    """
    stripped_src = src.replace(root, '', 1)
    stripped_dest_dir = os.path.dirname(dest.replace(root, '', 1))
    ascent = '../' * (stripped_dest_dir.count('/') + 1)
    return ascent + stripped_src
1445 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.

    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pair (fd, name).
    """
    # Delegate to tempfile; the keyword form makes the argument mapping explicit.
    return tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=directory)
1459 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Return a secure and unique directory by pre-creating it.

    If 'parent' is non-null, it will be the directory the directory is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pathname to the new directory
    """
    # Delegate to tempfile; the keyword form makes the argument mapping explicit.
    return tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=parent)
1473 ################################################################################
def is_email_alias(email):
    """ checks if the user part of the email is listed in the alias file """
    global alias_cache
    # Lazily build the cache of alias names on first use; cache it at
    # module level so the alias file is read only once per process.
    if alias_cache == None:
        aliasfn = which_alias_file()
        alias_cache = set()
        if aliasfn:
            # Alias file lines look like "name: target"; only the name
            # before the first colon matters here.
            for l in open(aliasfn):
                alias_cache.add(l.split(':')[0])
    uid = email.split('@')[0]
    return uid in alias_cache
1487 ################################################################################
def get_changes_files(from_dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place)

    Returns a list of filenames
    """
    try:
        # Much of the rest of p-u/p-a depends on being in the right place
        os.chdir(from_dir)
        changes_files = [x for x in os.listdir(from_dir) if x.endswith('.changes')]
    except OSError as e:
        # fubar (defined elsewhere in this module) reports the error;
        # presumably it aborts the process -- confirm before relying on it.
        fubar("Failed to read list from directory %s (%s)" % (from_dir, e))

    return changes_files
1506 ################################################################################
# Module-level initialisation: build the global apt configuration object
# ('Cnf') used throughout this module (e.g. by check_signature and
# gpg_keyring_args).
Cnf = apt_pkg.newConfiguration()
# DAK_TEST in the environment presumably marks a test run -- skip reading
# the real host configuration then (TODO confirm against the test suite).
if not os.getenv("DAK_TEST"):
    apt_pkg.ReadConfigFileISC(Cnf,default_config)

# Layer a host-specific configuration file over the defaults when
# which_conf_file() points somewhere other than the default dak.conf.
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())