2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
41 import email as modemail
43 from dbconn import DBConn, get_architecture, get_component, get_suite
44 from dak_exceptions import *
45 from textutils import fix_maintainer
46 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
47 re_multi_line_field, re_srchasver, re_verwithext, \
48 re_parse_maintainer, re_taint_free, re_gpg_uid, \
49 re_re_mark, re_whitespace_comment, re_issource
51 from srcformats import srcformats
53 ################################################################################
55 default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
56 default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used
58 alias_cache = None #: Cache for email alias checks
59 key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
61 # (hashname, function, earliest_changes_version)
62 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
63 ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
65 ################################################################################
68 """ Escape html chars """
69 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
71 ################################################################################
73 def open_file(filename, mode='r'):
75 Open C{file}, return fileobject.
77 @type filename: string
78 @param filename: path/filename to open
81 @param mode: open mode
84 @return: open fileobject
86 @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
90 f = open(filename, mode)
92 raise CantOpenError, filename
95 ################################################################################
97 def our_raw_input(prompt=""):
99 sys.stdout.write(prompt)
105 sys.stderr.write("\nUser interrupt (^D).\n")
108 ################################################################################
110 def extract_component_from_section(section):
113 if section.find('/') != -1:
114 component = section.split('/')[0]
116 # Expand default component
118 if Cnf.has_key("Component::%s" % section):
123 return (section, component)
125 ################################################################################
127 def parse_deb822(contents, signing_rules=0):
131 # Split the lines in the input, keeping the linebreaks.
132 lines = contents.splitlines(True)
135 raise ParseChangesError, "[Empty changes file]"
137 # Reindex by line number so we can easily verify the format of
143 indexed_lines[index] = line[:-1]
147 num_of_lines = len(indexed_lines.keys())
150 while index < num_of_lines:
152 line = indexed_lines[index]
154 if signing_rules == 1:
156 if index > num_of_lines:
157 raise InvalidDscError, index
158 line = indexed_lines[index]
159 if not line.startswith("-----BEGIN PGP SIGNATURE"):
160 raise InvalidDscError, index
165 if line.startswith("-----BEGIN PGP SIGNATURE"):
167 if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
169 if signing_rules == 1:
170 while index < num_of_lines and line != "":
172 line = indexed_lines[index]
174 # If we're not inside the signed data, don't process anything
175 if signing_rules >= 0 and not inside_signature:
177 slf = re_single_line_field.match(line)
179 field = slf.groups()[0].lower()
180 changes[field] = slf.groups()[1]
184 changes[field] += '\n'
186 mlf = re_multi_line_field.match(line)
189 raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
190 if first == 1 and changes[field] != "":
191 changes[field] += '\n'
193 changes[field] += mlf.groups()[0] + '\n'
197 if signing_rules == 1 and inside_signature:
198 raise InvalidDscError, index
200 changes["filecontents"] = "".join(lines)
202 if changes.has_key("source"):
203 # Strip the source version in brackets from the source field,
204 # put it in the "source-version" field instead.
205 srcver = re_srchasver.search(changes["source"])
207 changes["source"] = srcver.group(1)
208 changes["source-version"] = srcver.group(2)
211 raise ParseChangesError, error
215 ################################################################################
217 def parse_changes(filename, signing_rules=0):
219 Parses a changes file and returns a dictionary where each field is a
220 key. The mandatory first argument is the filename of the .changes
223 signing_rules is an optional argument:
225 - If signing_rules == -1, no signature is required.
226 - If signing_rules == 0 (the default), a signature is required.
227 - If signing_rules == 1, it turns on the same strict format checking
230 The rules for (signing_rules == 1)-mode are:
232 - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
233 followed by any PGP header data and must end with a blank line.
235 - The data section must end with a blank line and must be followed by
236 "-----BEGIN PGP SIGNATURE-----".
239 changes_in = open_file(filename)
240 content = changes_in.read()
243 unicode(content, 'utf-8')
245 raise ChangesUnicodeError, "Changes file not proper utf-8"
246 return parse_deb822(content, signing_rules)
248 ################################################################################
def hash_key(hashname):
    """Return the files-dict key under which a checksum is stored.

    E.g. "sha1" -> "sha1sum", matching the entries in known_hashes.
    """
    return hashname + 'sum'
253 ################################################################################
255 def create_hash(where, files, hashname, hashfunc):
257 create_hash extends the passed files dict with the given hash by
258 iterating over all files on disk and passing them to the hashing
263 for f in files.keys():
265 file_handle = open_file(f)
266 except CantOpenError:
267 rejmsg.append("Could not open file %s for checksumming" % (f))
270 files[f][hash_key(hashname)] = hashfunc(file_handle)
275 ################################################################################
277 def check_hash(where, files, hashname, hashfunc):
279 check_hash checks the given hash in the files dict against the actual
280 files on disk. The hash values need to be present consistently in
281 all file entries. It does not modify its input in any way.
285 for f in files.keys():
289 file_handle = open_file(f)
291 # Check for the hash entry, to not trigger a KeyError.
292 if not files[f].has_key(hash_key(hashname)):
293 rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
297 # Actually check the hash for correctness.
298 if hashfunc(file_handle) != files[f][hash_key(hashname)]:
299 rejmsg.append("%s: %s check failed in %s" % (f, hashname,
301 except CantOpenError:
302 # TODO: This happens when the file is in the pool.
303 # warn("Cannot open file %s" % f)
310 ################################################################################
312 def check_size(where, files):
314 check_size checks the file sizes in the passed files dict against the
319 for f in files.keys():
324 # TODO: This happens when the file is in the pool.
328 actual_size = entry[stat.ST_SIZE]
329 size = int(files[f]["size"])
330 if size != actual_size:
331 rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
332 % (f, actual_size, size, where))
335 ################################################################################
337 def check_dsc_files(dsc_filename, dsc=None, dsc_files=None):
339 Verify that the files listed in the Files field of the .dsc are
340 those expected given the announced Format.
342 @type dsc_filename: string
343 @param dsc_filename: path of .dsc file
346 @param dsc: the content of the .dsc parsed by C{parse_changes()}
348 @type dsc_files: dict
349 @param dsc_files: the file list returned by C{build_file_list()}
352 @return: all errors detected
356 # Parse the file if needed
358 dsc = parse_changes(dsc_filename, signing_rules=1);
359 if dsc_files == None:
360 dsc_files = build_file_list(dsc, is_a_dsc=1)
362 # Ensure .dsc lists proper set of source files according to the format
365 has_native_tar_gz = 0
368 has_more_orig_tar = 0
371 for f in dsc_files.keys():
372 m = re_issource.match(f)
374 rejmsg.append("%s: %s in Files field not recognised as source."
378 if ftype == "orig.tar.gz":
381 elif ftype == "diff.gz":
383 elif ftype == "tar.gz":
384 has_native_tar_gz += 1
386 elif re.match(r"debian\.tar\.(gz|bz2|lzma)", ftype):
388 elif re.match(r"orig\.tar\.(gz|bz2|lzma)", ftype):
390 elif re.match(r"tar\.(gz|bz2|lzma)", ftype):
392 elif re.match(r"orig-.+\.tar\.(gz|bz2|lzma)", ftype):
393 has_more_orig_tar += 1
395 reject("%s: unexpected source file '%s'" % (dsc_filename, f))
397 rejmsg.append("%s: lists multiple .orig tarballs." % (dsc_filename))
398 if has_native_tar > 1:
399 rejmsg.append("%s: lists multiple native tarballs." % (dsc_filename))
400 if has_debian_tar > 1 or has_debian_diff > 1:
401 rejmsg.append("%s: lists multiple debian diff/tarballs." % (dsc_filename))
403 for format in srcformats:
404 if format.re_format.match(dsc['format']):
405 msgs = format.reject_msgs(
414 rejmsg.extend(['%s: %s' % (dsc_filename, x) for x in msgs])
419 ################################################################################
421 def check_hash_fields(what, manifest):
423 check_hash_fields ensures that there are no checksum fields in the
424 given dict that we do not know about.
428 hashes = map(lambda x: x[0], known_hashes)
429 for field in manifest:
430 if field.startswith("checksums-"):
431 hashname = field.split("-",1)[1]
432 if hashname not in hashes:
433 rejmsg.append("Unsupported checksum field for %s "\
434 "in %s" % (hashname, what))
437 ################################################################################
439 def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
440 if format >= version:
441 # The version should contain the specified hash.
444 # Import hashes from the changes
445 rejmsg = parse_checksums(".changes", files, changes, hashname)
449 # We need to calculate the hash because it can't possibly
452 return func(".changes", files, hashname, hashfunc)
454 # We could add the orig which might be in the pool to the files dict to
455 # access the checksums easily.
457 def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
459 ensure_dsc_hashes' task is to ensure that each and every *present* hash
460 in the dsc is correct, i.e. identical to the changes file and if necessary
461 the pool. The latter task is delegated to check_hash.
465 if not dsc.has_key('Checksums-%s' % (hashname,)):
467 # Import hashes from the dsc
468 parse_checksums(".dsc", dsc_files, dsc, hashname)
470 rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
473 ################################################################################
475 def parse_checksums(where, files, manifest, hashname):
477 field = 'checksums-%s' % hashname
478 if not field in manifest:
480 for line in manifest[field].split('\n'):
483 clist = line.strip().split(' ')
485 checksum, size, checkfile = clist
487 rejmsg.append("Cannot parse checksum line [%s]" % (line))
489 if not files.has_key(checkfile):
490 # TODO: check for the file's entry in the original files dict, not
491 # the one modified by (auto)byhand and other weird stuff
492 # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
493 # (file, hashname, where))
495 if not files[checkfile]["size"] == size:
496 rejmsg.append("%s: size differs for files and checksums-%s entry "\
497 "in %s" % (checkfile, hashname, where))
499 files[checkfile][hash_key(hashname)] = checksum
500 for f in files.keys():
501 if not files[f].has_key(hash_key(hashname)):
502 rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
506 ################################################################################
508 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
510 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
513 # Make sure we have a Files: field to parse...
514 if not changes.has_key(field):
515 raise NoFilesFieldError
517 # Make sure we recognise the format of the Files: field
518 format = re_verwithext.search(changes.get("format", "0.0"))
520 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
522 format = format.groups()
523 if format[1] == None:
524 format = int(float(format[0])), 0, format[2]
526 format = int(format[0]), int(format[1]), format[2]
527 if format[2] == None:
531 # format = (0,0) are missing format headers of which we still
532 # have some in the archive.
533 if format != (1,0) and format != (0,0) and \
534 format != (3,0,"quilt") and format != (3,0,"native"):
535 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
537 if (format < (1,5) or format > (1,8)):
538 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
539 if field != "files" and format < (1,8):
540 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
542 includes_section = (not is_a_dsc) and field == "files"
544 # Parse each entry/line:
545 for i in changes[field].split('\n'):
549 section = priority = ""
552 (md5, size, section, priority, name) = s
554 (md5, size, name) = s
556 raise ParseChangesError, i
563 (section, component) = extract_component_from_section(section)
565 files[name] = Dict(size=size, section=section,
566 priority=priority, component=component)
567 files[name][hashname] = md5
571 ################################################################################
573 def send_mail (message, filename=""):
574 """sendmail wrapper, takes _either_ a message string or a file as arguments"""
576 # If we've been passed a string dump it into a temporary file
578 (fd, filename) = tempfile.mkstemp()
579 os.write (fd, message)
582 if Cnf.has_key("Dinstall::MailWhiteList") and \
583 Cnf["Dinstall::MailWhiteList"] != "":
584 message_in = open_file(filename)
585 message_raw = modemail.message_from_file(message_in)
589 whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
591 for line in whitelist_in:
592 if not re_whitespace_comment.match(line):
593 if re_re_mark.match(line):
594 whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
596 whitelist.append(re.compile(re.escape(line.strip())))
601 fields = ["To", "Bcc", "Cc"]
604 value = message_raw.get(field, None)
607 for item in value.split(","):
608 (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
614 if not mail_whitelisted:
615 print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])
619 # Doesn't have any mail in whitelist so remove the header
621 del message_raw[field]
623 message_raw.replace_header(field, string.join(match, ", "))
625 # Change message fields in order if we don't have a To header
626 if not message_raw.has_key("To"):
629 if message_raw.has_key(field):
630 message_raw[fields[-1]] = message_raw[field]
631 del message_raw[field]
634 # Clean up any temporary files
635 # and return, as we removed all recipients.
637 os.unlink (filename);
640 fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
641 os.write (fd, message_raw.as_string(True));
645 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
647 raise SendmailFailedError, output
649 # Clean up any temporary files
653 ################################################################################
655 def poolify (source, component):
658 if source[:3] == "lib":
659 return component + source[:4] + '/' + source + '/'
661 return component + source[:1] + '/' + source + '/'
663 ################################################################################
665 def move (src, dest, overwrite = 0, perms = 0664):
666 if os.path.exists(dest) and os.path.isdir(dest):
669 dest_dir = os.path.dirname(dest)
670 if not os.path.exists(dest_dir):
671 umask = os.umask(00000)
672 os.makedirs(dest_dir, 02775)
674 #print "Moving %s to %s..." % (src, dest)
675 if os.path.exists(dest) and os.path.isdir(dest):
676 dest += '/' + os.path.basename(src)
677 # Don't overwrite unless forced to
678 if os.path.exists(dest):
680 fubar("Can't move %s to %s - file already exists." % (src, dest))
682 if not os.access(dest, os.W_OK):
683 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
684 shutil.copy2(src, dest)
685 os.chmod(dest, perms)
688 def copy (src, dest, overwrite = 0, perms = 0664):
689 if os.path.exists(dest) and os.path.isdir(dest):
692 dest_dir = os.path.dirname(dest)
693 if not os.path.exists(dest_dir):
694 umask = os.umask(00000)
695 os.makedirs(dest_dir, 02775)
697 #print "Copying %s to %s..." % (src, dest)
698 if os.path.exists(dest) and os.path.isdir(dest):
699 dest += '/' + os.path.basename(src)
700 # Don't overwrite unless forced to
701 if os.path.exists(dest):
703 raise FileExistsError
705 if not os.access(dest, os.W_OK):
706 raise CantOverwriteError
707 shutil.copy2(src, dest)
708 os.chmod(dest, perms)
710 ################################################################################
713 res = socket.gethostbyaddr(socket.gethostname())
714 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
715 if database_hostname:
716 return database_hostname
720 def which_conf_file ():
721 res = socket.gethostbyaddr(socket.gethostname())
722 # In case we allow local config files per user, try if one exists
723 if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
724 homedir = os.getenv("HOME")
725 confpath = os.path.join(homedir, "/etc/dak.conf")
726 if os.path.exists(confpath):
727 apt_pkg.ReadConfigFileISC(Cnf,default_config)
729 # We are still in here, so there is no local config file or we do
730 # not allow local files. Do the normal stuff.
731 if Cnf.get("Config::" + res[0] + "::DakConfig"):
732 return Cnf["Config::" + res[0] + "::DakConfig"]
734 return default_config
736 def which_apt_conf_file ():
737 res = socket.gethostbyaddr(socket.gethostname())
738 # In case we allow local config files per user, try if one exists
739 if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
740 homedir = os.getenv("HOME")
741 confpath = os.path.join(homedir, "/etc/dak.conf")
742 if os.path.exists(confpath):
743 apt_pkg.ReadConfigFileISC(Cnf,default_config)
745 if Cnf.get("Config::" + res[0] + "::AptConfig"):
746 return Cnf["Config::" + res[0] + "::AptConfig"]
748 return default_apt_config
750 def which_alias_file():
751 hostname = socket.gethostbyaddr(socket.gethostname())[0]
752 aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
753 if os.path.exists(aliasfn):
758 ################################################################################
760 def TemplateSubst(map, filename):
761 """ Perform a substition of template """
762 templatefile = open_file(filename)
763 template = templatefile.read()
765 template = template.replace(x, str(map[x]))
769 ################################################################################
771 def fubar(msg, exit_code=1):
772 sys.stderr.write("E: %s\n" % (msg))
776 sys.stderr.write("W: %s\n" % (msg))
778 ################################################################################
780 # Returns the user name with a laughable attempt at rfc822 conformancy
781 # (read: removing stray periods).
783 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
786 return pwd.getpwuid(os.getuid())[0]
788 ################################################################################
798 return ("%d%s" % (c, t))
800 ################################################################################
802 def cc_fix_changes (changes):
803 o = changes.get("architecture", "")
805 del changes["architecture"]
806 changes["architecture"] = {}
808 changes["architecture"][j] = 1
810 def changes_compare (a, b):
811 """ Sort by source name, source version, 'have source', and then by filename """
813 a_changes = parse_changes(a)
818 b_changes = parse_changes(b)
822 cc_fix_changes (a_changes)
823 cc_fix_changes (b_changes)
825 # Sort by source name
826 a_source = a_changes.get("source")
827 b_source = b_changes.get("source")
828 q = cmp (a_source, b_source)
832 # Sort by source version
833 a_version = a_changes.get("version", "0")
834 b_version = b_changes.get("version", "0")
835 q = apt_pkg.VersionCompare(a_version, b_version)
839 # Sort by 'have source'
840 a_has_source = a_changes["architecture"].get("source")
841 b_has_source = b_changes["architecture"].get("source")
842 if a_has_source and not b_has_source:
844 elif b_has_source and not a_has_source:
847 # Fall back to sort by filename
850 ################################################################################
852 def find_next_free (dest, too_many=100):
855 while os.path.exists(dest) and extra < too_many:
856 dest = orig_dest + '.' + repr(extra)
858 if extra >= too_many:
859 raise NoFreeFilenameError
862 ################################################################################
864 def result_join (original, sep = '\t'):
866 for i in xrange(len(original)):
867 if original[i] == None:
868 resultlist.append("")
870 resultlist.append(original[i])
871 return sep.join(resultlist)
873 ################################################################################
875 def prefix_multi_line_string(str, prefix, include_blank_lines=0):
877 for line in str.split('\n'):
879 if line or include_blank_lines:
880 out += "%s%s\n" % (prefix, line)
881 # Strip trailing new line
886 ################################################################################
888 def validate_changes_file_arg(filename, require_changes=1):
890 'filename' is either a .changes or .dak file. If 'filename' is a
891 .dak file, it's changed to be the corresponding .changes file. The
892 function then checks if the .changes file a) exists and b) is
893 readable and returns the .changes filename if so. If there's a
894 problem, the next action depends on the option 'require_changes'
897 - If 'require_changes' == -1, errors are ignored and the .changes
898 filename is returned.
899 - If 'require_changes' == 0, a warning is given and 'None' is returned.
900 - If 'require_changes' == 1, a fatal error is raised.
905 orig_filename = filename
906 if filename.endswith(".dak"):
907 filename = filename[:-4]+".changes"
909 if not filename.endswith(".changes"):
910 error = "invalid file type; not a changes file"
912 if not os.access(filename,os.R_OK):
913 if os.path.exists(filename):
914 error = "permission denied"
916 error = "file not found"
919 if require_changes == 1:
920 fubar("%s: %s." % (orig_filename, error))
921 elif require_changes == 0:
922 warn("Skipping %s - %s" % (orig_filename, error))
924 else: # We only care about the .dak file
929 ################################################################################
932 return (arch != "source" and arch != "all")
934 ################################################################################
def join_with_commas_and(list):
    """Render a list as English prose.

    [] -> "nothing"; ["a"] -> "a"; ["a", "b", "c"] -> "a, b and c".
    """
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    head = ", ".join(list[:-1])
    return "%s and %s" % (head, list[-1])
941 ################################################################################
946 (pkg, version, constraint) = atom
948 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
951 pp_deps.append(pp_dep)
952 return " |".join(pp_deps)
954 ################################################################################
959 ################################################################################
961 def parse_args(Options):
962 """ Handle -a, -c and -s arguments; returns them as SQL constraints """
963 # XXX: This should go away and everything which calls it be converted
964 # to use SQLA properly. For now, we'll just fix it not to use
965 # the old Pg interface though
966 session = DBConn().session()
970 for suitename in split_args(Options["Suite"]):
971 suite = get_suite(suitename, session=session)
972 if suite.suite_id is None:
973 warn("suite '%s' not recognised." % (suite.suite_name))
975 suite_ids_list.append(suite.suite_id)
977 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
979 fubar("No valid suite given.")
984 if Options["Component"]:
985 component_ids_list = []
986 for componentname in split_args(Options["Component"]):
987 component = get_component(componentname, session=session)
988 if component is None:
989 warn("component '%s' not recognised." % (componentname))
991 component_ids_list.append(component.component_id)
992 if component_ids_list:
993 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
995 fubar("No valid component given.")
999 # Process architecture
1000 con_architectures = ""
1002 if Options["Architecture"]:
1004 for archname in split_args(Options["Architecture"]):
1005 if archname == "source":
1008 arch = get_architecture(archname, session=session)
1010 warn("architecture '%s' not recognised." % (archname))
1012 arch_ids_list.append(arch.arch_id)
1014 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
1016 if not check_source:
1017 fubar("No valid architecture given.")
1021 return (con_suites, con_architectures, con_components, check_source)
1023 ################################################################################
1025 # Inspired(tm) by Bryn Keller's print_exc_plus (See
1026 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
1029 tb = sys.exc_info()[2]
1036 frame = frame.f_back
1038 traceback.print_exc()
1040 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
1041 frame.f_code.co_filename,
1043 for key, value in frame.f_locals.items():
1044 print "\t%20s = " % key,
1048 print "<unable to print>"
1050 ################################################################################
1052 def try_with_debug(function):
1060 ################################################################################
1062 def arch_compare_sw (a, b):
1064 Function for use in sorting lists of architectures.
1066 Sorts normally except that 'source' dominates all others.
1069 if a == "source" and b == "source":
1078 ################################################################################
1080 def split_args (s, dwim=1):
1082 Split command line arguments which can be separated by either commas
1083 or whitespace. If dwim is set, it will complain about string ending
1084 in comma since this usually means someone did 'dak ls -a i386, m68k
1085 foo' or something and the inevitable confusion resulting from 'm68k'
1086 being treated as an argument is undesirable.
1089 if s.find(",") == -1:
1092 if s[-1:] == "," and dwim:
1093 fubar("split_args: found trailing comma, spurious space maybe?")
1096 ################################################################################
def Dict(**kwargs):
    """Convenience constructor: Dict(a=1, b=2) -> {'a': 1, 'b': 2}.

    The var-keyword parameter is named ``kwargs`` (not ``dict``) so the
    builtin ``dict`` type is not shadowed inside the function body.
    Callers are unaffected: keyword arguments are passed the same way.
    """
    return kwargs
1100 ########################################
1102 def gpgv_get_status_output(cmd, status_read, status_write):
1104 Our very own version of commands.getouputstatus(), hacked to support
1108 cmd = ['/bin/sh', '-c', cmd]
1109 p2cread, p2cwrite = os.pipe()
1110 c2pread, c2pwrite = os.pipe()
1111 errout, errin = os.pipe()
1121 for i in range(3, 256):
1122 if i != status_write:
1128 os.execvp(cmd[0], cmd)
1134 os.dup2(c2pread, c2pwrite)
1135 os.dup2(errout, errin)
1137 output = status = ""
1139 i, o, e = select.select([c2pwrite, errin, status_read], [], [])
1142 r = os.read(fd, 8196)
1144 more_data.append(fd)
1145 if fd == c2pwrite or fd == errin:
1147 elif fd == status_read:
1150 fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
1152 pid, exit_status = os.waitpid(pid, 0)
1154 os.close(status_write)
1155 os.close(status_read)
1165 return output, status, exit_status
1167 ################################################################################
1169 def process_gpgv_output(status):
1170 # Process the status-fd output
1173 for line in status.split('\n'):
1177 split = line.split()
1179 internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
1181 (gnupg, keyword) = split[:2]
1182 if gnupg != "[GNUPG:]":
1183 internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
1186 if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
1187 internal_error += "found duplicate status token ('%s').\n" % (keyword)
1190 keywords[keyword] = args
1192 return (keywords, internal_error)
1194 ################################################################################
1196 def retrieve_key (filename, keyserver=None, keyring=None):
1198 Retrieve the key that signed 'filename' from 'keyserver' and
1199 add it to 'keyring'. Returns nothing on success, or an error message
1203 # Defaults for keyserver and keyring
1205 keyserver = Cnf["Dinstall::KeyServer"]
1207 keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
1209 # Ensure the filename contains no shell meta-characters or other badness
1210 if not re_taint_free.match(filename):
1211 return "%s: tainted filename" % (filename)
1213 # Invoke gpgv on the file
1214 status_read, status_write = os.pipe()
1215 cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1216 (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1218 # Process the status-fd output
1219 (keywords, internal_error) = process_gpgv_output(status)
1221 return internal_error
1223 if not keywords.has_key("NO_PUBKEY"):
1224 return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1226 fingerprint = keywords["NO_PUBKEY"][0]
1227 # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
1228 # it'll try to create a lockfile in /dev. A better solution might
1229 # be a tempfile or something.
1230 cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1231 % (Cnf["Dinstall::SigningKeyring"])
1232 cmd += " --keyring %s --keyserver %s --recv-key %s" \
1233 % (keyring, keyserver, fingerprint)
1234 (result, output) = commands.getstatusoutput(cmd)
1236 return "'%s' failed with exit code %s" % (cmd, result)
1240 ################################################################################
1242 def gpg_keyring_args(keyrings=None):
1244 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1246 return " ".join(["--keyring %s" % x for x in keyrings])
1248 ################################################################################
def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None):
    """
    Check the signature of a file and return the fingerprint if the
    signature is valid or 'None' if it's not. The first argument is the
    filename whose signature should be checked. The second argument is a
    reject function and is called when an error is found. The reject()
    function must allow for two arguments: the first is the error message,
    the second is an optional prefix string. It's possible for reject()
    to be called more than once during an invocation of check_signature().
    The third argument is optional and is the name of the files the
    detached signature applies to. The fourth argument is optional and is
    a *list* of keyrings to use. 'autofetch' can either be None, True or
    False. If None, the default behaviour specified in the config will be
    used.
    """
    # NOTE(review): this chunk is missing several elided lines (e.g. the
    # initialisation of 'rejects = []' and guards such as 'if internal_error:',
    # 'if autofetch:' and 'if error_msg:') — confirm against the full file.

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(sig_filename):
        rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
        return (None, rejects)

    if data_filename and not re_taint_free.match(data_filename):
        rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
        return (None, rejects)

    # Fall back to the configured keyrings.
    # NOTE(review): an 'if not keyrings:' guard appears to be elided here.
    keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    # Autofetch the signing key if that's enabled
    if autofetch == None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    # NOTE(review): the 'if autofetch:' / 'if error_msg:' guards around the
    # following three lines are elided in this chunk.
    error_msg = retrieve_key(sig_filename)
    rejects.append(error_msg)
    return (None, rejects)

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    # NOTE(review): the 'if internal_error:' guard is elided in this chunk.
    rejects.append("internal error while performing signature check on %s." % (sig_filename))
    rejects.append(internal_error, "")
    rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.", "")
    return (None, rejects)

    # Now check for obviously bad things in the processed output
    if keywords.has_key("KEYREVOKED"):
        rejects.append("The key used to sign %s has been revoked." % (sig_filename))
    if keywords.has_key("BADSIG"):
        rejects.append("bad signature on %s." % (sig_filename))
    if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
        rejects.append("failed to check signature on %s." % (sig_filename))
    if keywords.has_key("NO_PUBKEY"):
        args = keywords["NO_PUBKEY"]
        # NOTE(review): extraction of 'key' from args is elided here.
        rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if keywords.has_key("BADARMOR"):
        rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if keywords.has_key("NODATA"):
        rejects.append("no signature found in %s." % (sig_filename))
    if keywords.has_key("EXPKEYSIG"):
        args = keywords["EXPKEYSIG"]
        # NOTE(review): extraction of 'key' from args is elided here.
        rejects.append("Signature made by expired key 0x%s" % (key))
    if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
        args = keywords["KEYEXPIRED"]
        # NOTE(review): extraction of 'timestamp' and the surrounding
        # try/except for the strftime conversion are elided here.
        if timestamp.count("T") == 0:
            # Numeric epoch timestamp: render as YYYY-MM-DD.
            expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
            expiredate = "unknown (%s)" % (timestamp)
        expiredate = timestamp
        rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))

    if len(rejects) > 0:
        return (None, rejects)

    # Next check gpgv exited with a zero return code
    # NOTE(review): the 'if exit_status:' guard is elided in this chunk.
    rejects.append("gpgv failed while checking %s." % (sig_filename))
    rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
    rejects.append(prefix_multi_line_string(output, " [GPG output:] "), "")
    return (None, rejects)

    # Sanity check the good stuff we expect
    if not keywords.has_key("VALIDSIG"):
        rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
    args = keywords["VALIDSIG"]
    # NOTE(review): a 'if len(args) < 1:' guard appears to be elided before
    # this internal-error report.
    rejects.append("internal error while checking signature on %s." % (sig_filename))
    fingerprint = args[0]
    if not keywords.has_key("GOODSIG"):
        rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if not keywords.has_key("SIG_ID"):
        rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

    # Finally ensure there's not something we don't recognise
    known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
                          NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")

    for keyword in keywords.keys():
        if not known_keywords.has_key(keyword):
            rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))

    if len(rejects) > 0:
        return (None, rejects)

    # All checks passed: report the validated fingerprint.
    return (fingerprint, [])
1382 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """Retrieve email addresses from gpg key uids for a given fingerprint."""
    # Serve from the module-level cache when we have seen this key before.
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses != None:
        # NOTE(review): the 'return addresses' for the cache hit and the
        # 'addresses = set()' initialisation are elided in this chunk.
    # Ask gpg to print the key so we can scrape uid lines from its output.
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
          % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    # NOTE(review): an 'if result == 0:' success guard appears to be elided.
    for l in output.split('\n'):
        m = re_gpg_uid.match(l)
        # NOTE(review): an 'if m:' guard appears to be elided here.
        addresses.add(m.group(1))
    # Memoise the result (and the final 'return addresses' is elided).
    key_uid_email_cache[fingerprint] = addresses
1401 ################################################################################
1403 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
1405 def wrap(paragraph, max_length, prefix=""):
1409 words = paragraph.split()
1412 word_size = len(word)
1413 if word_size > max_length:
1415 s += line + '\n' + prefix
1416 s += word + '\n' + prefix
1419 new_length = len(line) + word_size + 1
1420 if new_length > max_length:
1421 s += line + '\n' + prefix
1434 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
    Returns fixed 'src'.

    @type src: str
    @param src: absolute path the symlink points at
    @type dest: str
    @param dest: absolute path of the symlink itself
    @type root: str
    @param root: archive root prefix stripped from both paths
    """
    # Fix: the docstring body was a bare (syntactically invalid) expression;
    # it is now a proper triple-quoted docstring. Logic unchanged.
    # Strip the archive root from both paths (first occurrence only).
    src = src.replace(root, '', 1)
    dest = dest.replace(root, '', 1)
    # Walk up one '../' for every directory component of the link location.
    dest = os.path.dirname(dest)
    new_src = '../' * len(dest.split('/'))
    return new_src + src
1447 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.
    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pair (fd, name).
    """
    # Fix: the docstring body was a bare (syntactically invalid) expression;
    # it is now a proper triple-quoted docstring. Logic unchanged.
    # Delegate to tempfile, which handles secure creation and uniqueness.
    return tempfile.mkstemp(suffix, prefix, directory)
1461 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Return a secure and unique directory by pre-creating it.
    If 'parent' is non-null, it will be the directory the directory is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pathname to the new directory.
    """
    # Fix: the docstring body was a bare (syntactically invalid) expression;
    # it is now a proper triple-quoted docstring. Logic unchanged.
    # Delegate to tempfile, which handles secure creation and uniqueness.
    return tempfile.mkdtemp(suffix, prefix, parent)
1475 ################################################################################
def is_email_alias(email):
    """ checks if the user part of the email is listed in the alias file """
    # Lazily populate the module-level alias_cache on first use.
    # NOTE(review): the 'global alias_cache' declaration and the
    # 'alias_cache = set()' initialisation are elided in this chunk.
    if alias_cache == None:
        aliasfn = which_alias_file()
        # NOTE(review): an 'if aliasfn:' guard appears to be elided here.
        # Alias file format is "name: target"; we cache only the name part.
        for l in open(aliasfn):
            alias_cache.add(l.split(':')[0])
    # Compare only the local part (before the '@') against the alias names.
    uid = email.split('@')[0]
    return uid in alias_cache
1489 ################################################################################
def get_changes_files(dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place)

    Returns a list of filenames
    """
    # Much of the rest of p-u/p-a depends on being in the right place
    # NOTE(review): the 'try:' / 'os.chdir(dir)' / 'except OSError, e:'
    # lines are elided in this chunk — 'e' below is otherwise unbound.
    changes_files = [x for x in os.listdir(dir) if x.endswith('.changes')]
    fubar("Failed to read list from directory %s (%s)" % (dir, e))

    return changes_files
1508 ################################################################################
# Module-level bootstrap: load the dak configuration into the global Cnf.
# NOTE(review): an 'apt_pkg.init()' call appears to be elided just before
# this block — confirm against the full file.
Cnf = apt_pkg.newConfiguration()
# Always read the default config first so every host property has a value.
apt_pkg.ReadConfigFileISC(Cnf,default_config)

# Layer the host-specific config on top, if it differs from the default.
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1518 ###############################################################################