2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
41 import email as modemail
43 from dbconn import DBConn, get_architecture, get_component, get_suite
44 from dak_exceptions import *
45 from textutils import fix_maintainer
46 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
47 re_multi_line_field, re_srchasver, re_verwithext, \
48 re_parse_maintainer, re_taint_free, re_gpg_uid, \
49 re_re_mark, re_whitespace_comment, re_issource
51 from srcformats import srcformats
53 ################################################################################
# Module-level defaults and caches for the dak utility helpers.
# NOTE(review): this extract fuses original line numbers onto each line and
# omits some lines (gaps in the numbering); treat as a partial view.
55 default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
56 default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used
58 alias_cache = None #: Cache for email alias checks
59 key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
# known_hashes drives the .changes/.dsc checksum validation below; the
# third tuple element is the earliest changes-format version carrying it.
61 # (hashname, function, earliest_changes_version)
62 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
63 ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
65 ################################################################################
# (review) Fragment of html_escape(s): the `def` line is not present in this
# extract. Substitutes each escapable character via the html_escaping map.
68 """ Escape html chars """
69 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
71 ################################################################################
# open() wrapper that converts IOError into the project's CantOpenError.
# NOTE(review): the try/except scaffolding around open() is missing from this
# extract (gaps in the fused numbering).
73 def open_file(filename, mode='r'):
75 Open C{file}, return fileobject.
77 @type filename: string
78 @param filename: path/filename to open
81 @param mode: open mode
84 @return: open fileobject
86 @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
90 f = open(filename, mode)
92 raise CantOpenError, filename
95 ################################################################################
# raw_input() replacement: prints the prompt to stdout and (per the visible
# EOF message) presumably handles a ^D interrupt — body is partial here.
97 def our_raw_input(prompt=""):
99 sys.stdout.write(prompt)
105 sys.stderr.write("\nUser interrupt (^D).\n")
108 ################################################################################
# Split a "component/section" control-field value into (section, component);
# consults Cnf for a default-component expansion. Extract is partial: the
# default branch and component initialisation are missing here.
110 def extract_component_from_section(section):
113 if section.find('/') != -1:
114 component = section.split('/')[0]
116 # Expand default component
118 if Cnf.has_key("Component::%s" % section):
123 return (section, component)
125 ################################################################################
# Parse deb822-style control data (the guts of .changes/.dsc parsing) into a
# dict of lower-cased field names, honouring inline PGP armour according to
# signing_rules (-1 none required, 0 required, 1 strict .dsc rules).
# NOTE(review): many body lines (initialisers, index increments, else
# branches) are absent from this extract; do not assume completeness.
127 def parse_deb822(contents, signing_rules=0):
131 # Split the lines in the input, keeping the linebreaks.
132 lines = contents.splitlines(True)
135 raise ParseChangesError, "[Empty changes file]"
137 # Reindex by line number so we can easily verify the format of
143 indexed_lines[index] = line[:-1]
147 num_of_lines = len(indexed_lines.keys())
150 while index < num_of_lines:
152 line = indexed_lines[index]
# Strict mode: data must be terminated by the PGP SIGNATURE armour.
154 if signing_rules == 1:
156 if index > num_of_lines:
157 raise InvalidDscError, index
158 line = indexed_lines[index]
159 if not line.startswith("-----BEGIN PGP SIGNATURE"):
160 raise InvalidDscError, index
165 if line.startswith("-----BEGIN PGP SIGNATURE"):
167 if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
# Strict mode: skip PGP header data up to the mandatory blank line.
169 if signing_rules == 1:
170 while index < num_of_lines and line != "":
172 line = indexed_lines[index]
174 # If we're not inside the signed data, don't process anything
175 if signing_rules >= 0 and not inside_signature:
177 slf = re_single_line_field.match(line)
179 field = slf.groups()[0].lower()
180 changes[field] = slf.groups()[1]
184 changes[field] += '\n'
186 mlf = re_multi_line_field.match(line)
189 raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
190 if first == 1 and changes[field] != "":
191 changes[field] += '\n'
193 changes[field] += mlf.groups()[0] + '\n'
197 if signing_rules == 1 and inside_signature:
198 raise InvalidDscError, index
# Keep the verbatim input for later re-use (e.g. signature re-checks).
200 changes["filecontents"] = "".join(lines)
202 if changes.has_key("source"):
203 # Strip the source version in brackets from the source field,
204 # put it in the "source-version" field instead.
205 srcver = re_srchasver.search(changes["source"])
207 changes["source"] = srcver.group(1)
208 changes["source-version"] = srcver.group(2)
211 raise ParseChangesError, error
215 ################################################################################
# Read a .changes file, validate it is UTF-8, and delegate field parsing to
# parse_deb822(). Extract is partial (the try/except around the unicode()
# check and the file close are missing here).
217 def parse_changes(filename, signing_rules=0):
219 Parses a changes file and returns a dictionary where each field is a
220 key. The mandatory first argument is the filename of the .changes
223 signing_rules is an optional argument:
225 - If signing_rules == -1, no signature is required.
226 - If signing_rules == 0 (the default), a signature is required.
227 - If signing_rules == 1, it turns on the same strict format checking
230 The rules for (signing_rules == 1)-mode are:
232 - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
233 followed by any PGP header data and must end with a blank line.
235 - The data section must end with a blank line and must be followed by
236 "-----BEGIN PGP SIGNATURE-----".
239 changes_in = open_file(filename)
240 content = changes_in.read()
243 unicode(content, 'utf-8')
245 raise ChangesUnicodeError, "Changes file not proper utf-8"
246 return parse_deb822(content, signing_rules)
248 ################################################################################
def hash_key(hashname):
    """Return the files-dict key under which *hashname*'s digest is stored.

    E.g. "md5" -> "md5sum", "sha1" -> "sha1sum".
    """
    return hashname + 'sum'
253 ################################################################################
# For each file on disk, compute hashfunc over its contents and store the
# digest into files[f]["<hashname>sum"]; unreadable files are collected into
# the reject-message list. Extract is partial (try line, rejmsg init, return
# are missing).
255 def create_hash(where, files, hashname, hashfunc):
257 create_hash extends the passed files dict with the given hash by
258 iterating over all files on disk and passing them to the hashing
263 for f in files.keys():
265 file_handle = open_file(f)
266 except CantOpenError:
267 rejmsg.append("Could not open file %s for checksumming" % (f))
270 files[f][hash_key(hashname)] = hashfunc(file_handle)
275 ################################################################################
# Verify the recorded hash of every file entry against the file on disk,
# appending human-readable reject messages; does not modify its input.
# Extract is partial (try/finally scaffolding, continues, return missing).
277 def check_hash(where, files, hashname, hashfunc):
279 check_hash checks the given hash in the files dict against the actual
280 files on disk. The hash values need to be present consistently in
281 all file entries. It does not modify its input in any way.
285 for f in files.keys():
289 file_handle = open_file(f)
291 # Check for the hash entry, to not trigger a KeyError.
292 if not files[f].has_key(hash_key(hashname)):
293 rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
297 # Actually check the hash for correctness.
298 if hashfunc(file_handle) != files[f][hash_key(hashname)]:
299 rejmsg.append("%s: %s check failed in %s" % (f, hashname,
301 except CantOpenError:
302 # TODO: This happens when the file is in the pool.
303 # warn("Cannot open file %s" % f)
310 ################################################################################
# Compare each entry's recorded "size" field against os.stat() of the file
# on disk. Extract is partial (the stat call / OSError handling is missing).
312 def check_size(where, files):
314 check_size checks the file sizes in the passed files dict against the
319 for f in files.keys():
324 # TODO: This happens when the file is in the pool.
# entry here is presumably the os.stat() result — the assignment is not
# visible in this extract; confirm against the full source.
328 actual_size = entry[stat.ST_SIZE]
329 size = int(files[f]["size"])
330 if size != actual_size:
331 rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
332 % (f, actual_size, size, where))
335 ################################################################################
# Validate that the .dsc's Files list matches what its Format declaration
# allows (orig tarball vs diff vs debian tarball vs native, etc.), then let
# each registered source format object veto the combination.
# NOTE(review): many counter initialisations and counting branches are
# missing from this extract; also note L184 calls reject() while the rest of
# the function appends to rejmsg — confirm against the full source.
337 def check_dsc_files(dsc_filename, dsc=None, dsc_files=None):
339 Verify that the files listed in the Files field of the .dsc are
340 those expected given the announced Format.
342 @type dsc_filename: string
343 @param dsc_filename: path of .dsc file
346 @param dsc: the content of the .dsc parsed by C{parse_changes()}
348 @type dsc_files: dict
349 @param dsc_files: the file list returned by C{build_file_list()}
352 @return: all errors detected
356 # Parse the file if needed
358 dsc = parse_changes(dsc_filename, signing_rules=1);
360 if dsc_files is None:
361 dsc_files = build_file_list(dsc, is_a_dsc=1)
363 # Ensure .dsc lists proper set of source files according to the format
366 has_native_tar_gz = 0
369 has_more_orig_tar = 0
372 for f in dsc_files.keys():
373 m = re_issource.match(f)
375 rejmsg.append("%s: %s in Files field not recognised as source."
379 if ftype == "orig.tar.gz":
382 elif ftype == "diff.gz":
384 elif ftype == "tar.gz":
385 has_native_tar_gz += 1
387 elif re.match(r"debian\.tar\.(gz|bz2|lzma)", ftype):
389 elif re.match(r"orig\.tar\.(gz|bz2|lzma)", ftype):
391 elif re.match(r"tar\.(gz|bz2|lzma)", ftype):
393 elif re.match(r"orig-.+\.tar\.(gz|bz2|lzma)", ftype):
394 has_more_orig_tar += 1
396 reject("%s: unexpected source file '%s'" % (dsc_filename, f))
398 rejmsg.append("%s: lists multiple .orig tarballs." % (dsc_filename))
399 if has_native_tar > 1:
400 rejmsg.append("%s: lists multiple native tarballs." % (dsc_filename))
401 if has_debian_tar > 1 or has_debian_diff > 1:
402 rejmsg.append("%s: lists multiple debian diff/tarballs." % (dsc_filename))
# Delegate format-specific validation to the srcformats registry.
404 for format in srcformats:
405 if format.re_format.match(dsc['format']):
406 msgs = format.reject_msgs(
415 rejmsg.extend(['%s: %s' % (dsc_filename, x) for x in msgs])
420 ################################################################################
# Reject any "Checksums-*" field naming a hash algorithm that is not listed
# in known_hashes. Extract is partial (rejmsg init / return missing).
422 def check_hash_fields(what, manifest):
424 check_hash_fields ensures that there are no checksum fields in the
425 given dict that we do not know about.
429 hashes = map(lambda x: x[0], known_hashes)
430 for field in manifest:
431 if field.startswith("checksums-"):
432 hashname = field.split("-",1)[1]
433 if hashname not in hashes:
434 rejmsg.append("Unsupported checksum field for %s "\
435 "in %s" % (hashname, what))
438 ################################################################################
# Ensure the .changes carries the given hash: if the announced changes
# format is new enough, import the hashes from the Checksums field;
# otherwise fall back to computing them. Extract is partial (the else branch
# and the binding of `func` are missing).
440 def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
441 if format >= version:
442 # The version should contain the specified hash.
445 # Import hashes from the changes
446 rejmsg = parse_checksums(".changes", files, changes, hashname)
450 # We need to calculate the hash because it can't possibly
# func here is presumably create_hash or check_hash — binding not visible
# in this extract; confirm against the full source.
453 return func(".changes", files, hashname, hashfunc)
455 # We could add the orig which might be in the pool to the files dict to
456 # access the checksums easily.
# Verify every hash *present* in the .dsc: import the Checksums-<hash> field
# into dsc_files, then run check_hash against disk/pool. Extract is partial
# (rejmsg init / early return missing).
458 def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
460 ensure_dsc_hashes' task is to ensure that each and every *present* hash
461 in the dsc is correct, i.e. identical to the changes file and if necessary
462 the pool. The latter task is delegated to check_hash.
466 if not dsc.has_key('Checksums-%s' % (hashname,)):
468 # Import hashes from the dsc
469 parse_checksums(".dsc", dsc_files, dsc, hashname)
471 rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
474 ################################################################################
# Parse a "Checksums-<hash>" field ("checksum size filename" per line) and
# record each checksum into files[checkfile]["<hash>sum>"], cross-checking
# sizes and reporting files present in only one of the two lists.
476 def parse_checksums(where, files, manifest, hashname):
478 field = 'checksums-%s' % hashname
479 if not field in manifest:
481 for line in manifest[field].split('\n'):
484 clist = line.strip().split(' ')
486 checksum, size, checkfile = clist
488 rejmsg.append("Cannot parse checksum line [%s]" % (line))
490 if not files.has_key(checkfile):
491 # TODO: check for the file's entry in the original files dict, not
492 # the one modified by (auto)byhand and other weird stuff
493 # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
494 # (file, hashname, where))
# NOTE(review): `size` is a string split from the field while
# files[...]["size"] may be an int — this comparison can spuriously fail;
# confirm the stored type before relying on it.
496 if not files[checkfile]["size"] == size:
497 rejmsg.append("%s: size differs for files and checksums-%s entry "\
498 "in %s" % (checkfile, hashname, where))
500 files[checkfile][hash_key(hashname)] = checksum
501 for f in files.keys():
502 if not files[f].has_key(hash_key(hashname)):
# NOTE(review): this message interpolates `checkfile` (the last line parsed
# above) instead of `f`, the file actually missing its entry — looks like a
# bug; the message can name the wrong file.
503 rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
507 ################################################################################
509 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
# Parse the Files (or Checksums-*) field of a parsed .changes/.dsc into a
# dict keyed by filename, with size/section/priority/component and the md5
# (or given hash). Validates the announced Format version first.
# NOTE(review): extract is partial — the per-line try/except, blank-line
# skip and the final return are missing.
511 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
514 # Make sure we have a Files: field to parse...
515 if not changes.has_key(field):
516 raise NoFilesFieldError
518 # Make sure we recognise the format of the Files: field
519 format = re_verwithext.search(changes.get("format", "0.0"))
521 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
523 format = format.groups()
524 if format[1] == None:
525 format = int(float(format[0])), 0, format[2]
527 format = int(format[0]), int(format[1]), format[2]
528 if format[2] == None:
# .dsc formats are an enumerated set; .changes formats are a 1.5..1.8 range.
532 # format = (0,0) are missing format headers of which we still
533 # have some in the archive.
534 if format != (1,0) and format != (0,0) and \
535 format != (3,0,"quilt") and format != (3,0,"native"):
536 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
538 if (format < (1,5) or format > (1,8)):
539 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
540 if field != "files" and format < (1,8):
541 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
# Only the classic Files field in a .changes carries section/priority.
543 includes_section = (not is_a_dsc) and field == "files"
545 # Parse each entry/line:
546 for i in changes[field].split('\n'):
550 section = priority = ""
553 (md5, size, section, priority, name) = s
555 (md5, size, name) = s
557 raise ParseChangesError, i
564 (section, component) = extract_component_from_section(section)
566 files[name] = Dict(size=size, section=section,
567 priority=priority, component=component)
568 files[name][hashname] = md5
572 ################################################################################
# Send a mail via the configured sendmail command. Accepts either a message
# string (dumped to a tempfile) or a filename. If Dinstall::MailWhiteList is
# set, recipients in To/Bcc/Cc are filtered against the (regex-capable)
# whitelist before sending.
# NOTE(review): extract is partial — whitelist init, match accumulation,
# several closes/unlinks and the final cleanup are missing.
574 def send_mail (message, filename=""):
575 """sendmail wrapper, takes _either_ a message string or a file as arguments"""
577 # If we've been passed a string dump it into a temporary file
579 (fd, filename) = tempfile.mkstemp()
580 os.write (fd, message)
583 if Cnf.has_key("Dinstall::MailWhiteList") and \
584 Cnf["Dinstall::MailWhiteList"] != "":
585 message_in = open_file(filename)
586 message_raw = modemail.message_from_file(message_in)
# Whitelist entries starting with the RE marker are compiled as regexes,
# everything else is matched literally (re.escape).
590 whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
592 for line in whitelist_in:
593 if not re_whitespace_comment.match(line):
594 if re_re_mark.match(line):
595 whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
597 whitelist.append(re.compile(re.escape(line.strip())))
# Walk the recipient headers and keep only whitelisted addresses.
602 fields = ["To", "Bcc", "Cc"]
605 value = message_raw.get(field, None)
608 for item in value.split(","):
609 (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
615 if not mail_whitelisted:
616 print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])
620 # Doesn't have any mail in whitelist so remove the header
622 del message_raw[field]
624 message_raw.replace_header(field, string.join(match, ", "))
626 # Change message fields in order if we don't have a To header
627 if not message_raw.has_key("To"):
630 if message_raw.has_key(field):
631 message_raw[fields[-1]] = message_raw[field]
632 del message_raw[field]
635 # Clean up any temporary files
636 # and return, as we removed all recipients.
638 os.unlink (filename);
# Rewrite the (temp) file with the filtered message before invoking sendmail.
641 fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
642 os.write (fd, message_raw.as_string(True));
646 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
648 raise SendmailFailedError, output
650 # Clean up any temporary files
654 ################################################################################
# Map a source package name to its pool subdirectory: "lib*" packages pool
# under lib<x>/ (4 chars), everything else under the first letter.
# NOTE(review): lines between the def and the first if are missing from this
# extract (likely component normalisation); treat as a partial body.
656 def poolify (source, component):
659 if source[:3] == "lib":
660 return component + source[:4] + '/' + source + '/'
662 return component + source[:1] + '/' + source + '/'
664 ################################################################################
# Move src to dest (file or directory target), creating the destination
# directory group-writable if needed; refuses to clobber unless overwrite.
# NOTE(review): extract is partial — the dest_dir/else assignment, the umask
# restore, the overwrite guard and the final os.unlink(src) are missing.
666 def move (src, dest, overwrite = 0, perms = 0664):
667 if os.path.exists(dest) and os.path.isdir(dest):
670 dest_dir = os.path.dirname(dest)
671 if not os.path.exists(dest_dir):
672 umask = os.umask(00000)
673 os.makedirs(dest_dir, 02775)
675 #print "Moving %s to %s..." % (src, dest)
676 if os.path.exists(dest) and os.path.isdir(dest):
677 dest += '/' + os.path.basename(src)
678 # Don't overwrite unless forced to
679 if os.path.exists(dest):
681 fubar("Can't move %s to %s - file already exists." % (src, dest))
683 if not os.access(dest, os.W_OK):
684 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
685 shutil.copy2(src, dest)
686 os.chmod(dest, perms)
# Copy src to dest; same shape as move() above but raises FileExistsError /
# CantOverwriteError instead of calling fubar(), and does not remove src.
# NOTE(review): extract is partial — mirrors the gaps noted for move().
689 def copy (src, dest, overwrite = 0, perms = 0664):
690 if os.path.exists(dest) and os.path.isdir(dest):
693 dest_dir = os.path.dirname(dest)
694 if not os.path.exists(dest_dir):
695 umask = os.umask(00000)
696 os.makedirs(dest_dir, 02775)
698 #print "Copying %s to %s..." % (src, dest)
699 if os.path.exists(dest) and os.path.isdir(dest):
700 dest += '/' + os.path.basename(src)
701 # Don't overwrite unless forced to
702 if os.path.exists(dest):
704 raise FileExistsError
706 if not os.access(dest, os.W_OK):
707 raise CantOverwriteError
708 shutil.copy2(src, dest)
709 os.chmod(dest, perms)
711 ################################################################################
# (review) Fragment: the enclosing def line is not present in this extract.
# Looks up a per-host DatabaseHostname override in Cnf keyed by the local
# hostname.
714 res = socket.gethostbyaddr(socket.gethostname())
715 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
716 if database_hostname:
717 return database_hostname
# Pick the dak config file: optional per-user local config (if the host
# allows it), then a per-host DakConfig override, else the default.
721 def which_conf_file ():
722 res = socket.gethostbyaddr(socket.gethostname())
723 # In case we allow local config files per user, try if one exists
724 if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
725 homedir = os.getenv("HOME")
# NOTE(review): os.path.join discards homedir because the second argument
# is absolute — confpath is always "/etc/dak.conf". Likely intended a
# path under the home directory; confirm intent.
726 confpath = os.path.join(homedir, "/etc/dak.conf")
727 if os.path.exists(confpath):
# NOTE(review): reads default_config here, not confpath — the local file
# found above is never actually loaded; looks unintended, confirm.
728 apt_pkg.ReadConfigFileISC(Cnf,default_config)
730 # We are still in here, so there is no local config file or we do
731 # not allow local files. Do the normal stuff.
732 if Cnf.get("Config::" + res[0] + "::DakConfig"):
733 return Cnf["Config::" + res[0] + "::DakConfig"]
735 return default_config
# Pick the apt config file; same per-host/per-user lookup pattern as
# which_conf_file() above, falling back to default_apt_config.
737 def which_apt_conf_file ():
738 res = socket.gethostbyaddr(socket.gethostname())
739 # In case we allow local config files per user, try if one exists
740 if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
741 homedir = os.getenv("HOME")
# NOTE(review): same two issues as which_conf_file(): os.path.join with an
# absolute second argument ignores homedir, and ReadConfigFileISC is passed
# default_config rather than confpath. Confirm and fix together.
742 confpath = os.path.join(homedir, "/etc/dak.conf")
743 if os.path.exists(confpath):
744 apt_pkg.ReadConfigFileISC(Cnf,default_config)
746 if Cnf.get("Config::" + res[0] + "::AptConfig"):
747 return Cnf["Config::" + res[0] + "::AptConfig"]
749 return default_apt_config
# Return the per-host forward-alias file if it exists. Extract is partial
# (the return statements after the existence check are missing).
751 def which_alias_file():
752 hostname = socket.gethostbyaddr(socket.gethostname())[0]
753 aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
754 if os.path.exists(aliasfn):
759 ################################################################################
# Read the template file and replace each key of `map` with str(map[key]).
# Extract is partial (the loop header over map and the return are missing).
761 def TemplateSubst(map, filename):
762 """ Perform a substitution of template """
763 templatefile = open_file(filename)
764 template = templatefile.read()
766 template = template.replace(x, str(map[x]))
770 ################################################################################
# fubar(): print a fatal error to stderr; presumably exits with exit_code
# (the exit line is missing from this extract).
772 def fubar(msg, exit_code=1):
773 sys.stderr.write("E: %s\n" % (msg))
# (review) Fragment of warn(msg): its def line is not present here.
777 sys.stderr.write("W: %s\n" % (msg))
779 ################################################################################
781 # Returns the user name with a laughable attempt at rfc822 conformancy
782 # (read: removing stray periods).
# (review) Fragments of two passwd-lookup helpers (GECOS name and login
# name); their def lines are not present in this extract.
784 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
787 return pwd.getpwuid(os.getuid())[0]
789 ################################################################################
# (review) Fragment of a size-formatting helper returning count+unit suffix;
# the enclosing def is not visible here.
799 return ("%d%s" % (c, t))
801 ################################################################################
# Normalise the "architecture" field in-place from a whitespace-separated
# string into a dict of {arch: 1} for set-style membership tests. Extract is
# partial (the split/loop header over the old value is missing).
803 def cc_fix_changes (changes):
804 o = changes.get("architecture", "")
806 del changes["architecture"]
807 changes["architecture"] = {}
809 changes["architecture"][j] = 1
# Comparison function (cmp-style, Python 2) ordering two .changes filenames
# by source name, then version, then presence of source, then filename.
# NOTE(review): extract is partial — the try/except around parse_changes,
# the early `if q: return q` exits and the final filename cmp are missing.
811 def changes_compare (a, b):
812 """ Sort by source name, source version, 'have source', and then by filename """
814 a_changes = parse_changes(a)
819 b_changes = parse_changes(b)
823 cc_fix_changes (a_changes)
824 cc_fix_changes (b_changes)
826 # Sort by source name
827 a_source = a_changes.get("source")
828 b_source = b_changes.get("source")
829 q = cmp (a_source, b_source)
833 # Sort by source version
834 a_version = a_changes.get("version", "0")
835 b_version = b_changes.get("version", "0")
836 q = apt_pkg.VersionCompare(a_version, b_version)
840 # Sort by 'have source'
841 a_has_source = a_changes["architecture"].get("source")
842 b_has_source = b_changes["architecture"].get("source")
843 if a_has_source and not b_has_source:
845 elif b_has_source and not a_has_source:
848 # Fall back to sort by filename
851 ################################################################################
# Find a non-existing filename by appending ".<n>" suffixes, giving up
# (NoFreeFilenameError) after too_many attempts. Extract is partial (the
# extra/orig_dest initialisation, increment and return are missing).
853 def find_next_free (dest, too_many=100):
856 while os.path.exists(dest) and extra < too_many:
857 dest = orig_dest + '.' + repr(extra)
859 if extra >= too_many:
860 raise NoFreeFilenameError
863 ################################################################################
def result_join(original, sep='\t'):
    """Join the elements of *original* with *sep*, rendering None as "".

    Used to flatten result rows into delimiter-separated text where NULL
    values should appear as empty fields (sep defaults to TAB).

    Fixes: replaces the Python2-only xrange() index loop and the
    '== None' comparison with a generator expression and 'is None';
    behaviour is otherwise unchanged.
    """
    return sep.join("" if element is None else element for element in original)
874 ################################################################################
# Prepend `prefix` to every (by default non-blank) line of `str`. Extract is
# partial (the `out` accumulator init, per-line strip and the trailing-
# newline strip / return are missing).
# NOTE(review): the first parameter shadows the builtin name `str`.
876 def prefix_multi_line_string(str, prefix, include_blank_lines=0):
878 for line in str.split('\n'):
880 if line or include_blank_lines:
881 out += "%s%s\n" % (prefix, line)
882 # Strip trailing new line
887 ################################################################################
# Map a .dak argument to its .changes counterpart and verify the .changes
# file exists and is readable; error handling depends on require_changes.
# Extract is partial (error init and the success/None returns are missing).
889 def validate_changes_file_arg(filename, require_changes=1):
891 'filename' is either a .changes or .dak file. If 'filename' is a
892 .dak file, it's changed to be the corresponding .changes file. The
893 function then checks if the .changes file a) exists and b) is
894 readable and returns the .changes filename if so. If there's a
895 problem, the next action depends on the option 'require_changes'
898 - If 'require_changes' == -1, errors are ignored and the .changes
899 filename is returned.
900 - If 'require_changes' == 0, a warning is given and 'None' is returned.
901 - If 'require_changes' == 1, a fatal error is raised.
906 orig_filename = filename
907 if filename.endswith(".dak"):
908 filename = filename[:-4]+".changes"
910 if not filename.endswith(".changes"):
911 error = "invalid file type; not a changes file"
913 if not os.access(filename,os.R_OK):
914 if os.path.exists(filename):
915 error = "permission denied"
917 error = "file not found"
920 if require_changes == 1:
921 fubar("%s: %s." % (orig_filename, error))
922 elif require_changes == 0:
923 warn("Skipping %s - %s" % (orig_filename, error))
925 else: # We only care about the .dak file
930 ################################################################################
# (review) Fragment of real_arch(arch): true for any concrete binary arch
# (not "source" and not "all"); the def line is not present in this extract.
933 return (arch != "source" and arch != "all")
935 ################################################################################
def join_with_commas_and(list):
    """Format a sequence for human reading: "a, b and c".

    Returns "nothing" for an empty sequence and the sole element for a
    single-element sequence.
    """
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    return "%s and %s" % (", ".join(list[:-1]), list[-1])
942 ################################################################################
# (review) Fragment of a dependency pretty-printer (pp_deps): formats each
# (pkg, version, constraint) atom as "pkg (constraint version)" and joins
# alternatives with " |". The def line, loop header and the unversioned
# branch are not present in this extract.
947 (pkg, version, constraint) = atom
949 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
952 pp_deps.append(pp_dep)
953 return " |".join(pp_deps)
955 ################################################################################
960 ################################################################################
# Translate -s/-c/-a command-line options into SQL WHERE fragments using the
# SQLAlchemy-backed lookup helpers; returns (con_suites, con_architectures,
# con_components, check_source).
# NOTE(review): extract is partial — list initialisers, several else/continue
# branches and the "source" check_source handling are missing.
962 def parse_args(Options):
963 """ Handle -a, -c and -s arguments; returns them as SQL constraints """
964 # XXX: This should go away and everything which calls it be converted
965 # to use SQLA properly. For now, we'll just fix it not to use
966 # the old Pg interface though
967 session = DBConn().session()
# Process suite.
971 for suitename in split_args(Options["Suite"]):
972 suite = get_suite(suitename, session=session)
# NOTE(review): if get_suite returns None for an unknown suite this
# attribute access raises AttributeError before the warn() — confirm
# get_suite's contract.
973 if suite.suite_id is None:
974 warn("suite '%s' not recognised." % (suite.suite_name))
976 suite_ids_list.append(suite.suite_id)
978 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
980 fubar("No valid suite given.")
# Process component.
985 if Options["Component"]:
986 component_ids_list = []
987 for componentname in split_args(Options["Component"]):
988 component = get_component(componentname, session=session)
989 if component is None:
990 warn("component '%s' not recognised." % (componentname))
992 component_ids_list.append(component.component_id)
993 if component_ids_list:
994 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
996 fubar("No valid component given.")
1000 # Process architecture
1001 con_architectures = ""
1003 if Options["Architecture"]:
1005 for archname in split_args(Options["Architecture"]):
1006 if archname == "source":
1009 arch = get_architecture(archname, session=session)
1011 warn("architecture '%s' not recognised." % (archname))
1013 arch_ids_list.append(arch.arch_id)
1015 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
1017 if not check_source:
1018 fubar("No valid architecture given.")
1022 return (con_suites, con_architectures, con_components, check_source)
1024 ################################################################################
1026 # Inspired(tm) by Bryn Keller's print_exc_plus (See
1027 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
# (review) Fragment of an extended-traceback printer: walks the traceback
# frames and dumps each frame's locals after traceback.print_exc(). The def
# line and the frame-collection loop are not present in this extract.
1030 tb = sys.exc_info()[2]
1037 frame = frame.f_back
1039 traceback.print_exc()
1041 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
1042 frame.f_code.co_filename,
1044 for key, value in frame.f_locals.items():
1045 print "\t%20s = " % key,
1049 print "<unable to print>"
1051 ################################################################################
# (review) try_with_debug's body is entirely absent from this extract; per
# the name it presumably wraps `function` and dumps debug info on failure.
1053 def try_with_debug(function):
1061 ################################################################################
# cmp-style comparator for architecture names where "source" sorts before
# everything else. Extract is partial (the non-equal branches and the final
# cmp fallback are missing).
1063 def arch_compare_sw (a, b):
1065 Function for use in sorting lists of architectures.
1067 Sorts normally except that 'source' dominates all others.
1070 if a == "source" and b == "source":
1079 ################################################################################
# Split a comma- or whitespace-separated argument string into a list,
# optionally (dwim) rejecting a trailing comma as a likely typo. Extract is
# partial (the whitespace-split and comma-split returns are missing).
1081 def split_args (s, dwim=1):
1083 Split command line arguments which can be separated by either commas
1084 or whitespace. If dwim is set, it will complain about string ending
1085 in comma since this usually means someone did 'dak ls -a i386, m68k
1086 foo' or something and the inevitable confusion resulting from 'm68k'
1087 being treated as an argument is undesirable.
1090 if s.find(",") == -1:
1093 if s[-1:] == "," and dwim:
1094 fubar("split_args: found trailing comma, spurious space maybe?")
1097 ################################################################################
def Dict(**kwargs):
    """Build a dict from keyword arguments (legacy helper).

    Fix: the catch-all parameter was previously named ``dict``, shadowing
    the builtin; renaming it to ``kwargs`` is invisible to callers.
    """
    return kwargs
1101 ########################################
# Run `cmd` through /bin/sh with an extra inherited status fd (for gpgv's
# --status-fd), collecting (output, status, exit_status) via select() over
# the child's stdout/stderr and the status pipe.
# NOTE(review): extract is heavily partial — the fork, fd closes in both
# parent and child, the read-accumulation branches and the loop exit
# condition are missing; treat this as an outline only.
1103 def gpgv_get_status_output(cmd, status_read, status_write):
1105 Our very own version of commands.getouputstatus(), hacked to support
1109 cmd = ['/bin/sh', '-c', cmd]
1110 p2cread, p2cwrite = os.pipe()
1111 c2pread, c2pwrite = os.pipe()
1112 errout, errin = os.pipe()
# Child: close every fd except the status pipe before exec.
1122 for i in range(3, 256):
1123 if i != status_write:
1129 os.execvp(cmd[0], cmd)
1135 os.dup2(c2pread, c2pwrite)
1136 os.dup2(errout, errin)
1138 output = status = ""
1140 i, o, e = select.select([c2pwrite, errin, status_read], [], [])
1143 r = os.read(fd, 8196)
1145 more_data.append(fd)
1146 if fd == c2pwrite or fd == errin:
1148 elif fd == status_read:
1151 fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
1153 pid, exit_status = os.waitpid(pid, 0)
1155 os.close(status_write)
1156 os.close(status_read)
1166 return output, status, exit_status
1168 ################################################################################
# Parse gpgv --status-fd output into a {KEYWORD: args} dict plus a string of
# accumulated internal-error messages (empty string means clean parse).
# Extract is partial (keywords/internal_error init, blank-line skip,
# continue statements and the args extraction are missing).
1170 def process_gpgv_output(status):
1171 # Process the status-fd output
1174 for line in status.split('\n'):
1178 split = line.split()
1180 internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
1182 (gnupg, keyword) = split[:2]
1183 if gnupg != "[GNUPG:]":
1184 internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
# Some status tokens may legitimately repeat; everything else may not.
1187 if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
1188 internal_error += "found duplicate status token ('%s').\n" % (keyword)
1191 keywords[keyword] = args
1193 return (keywords, internal_error)
1195 ################################################################################
# Fetch the public key that signed `filename` from a keyserver into a
# keyring: run gpgv against an empty keyring to learn the NO_PUBKEY
# fingerprint, then gpg --recv-key it. Returns an error message string on
# failure. Extract is partial (keyserver/keyring default guards, the
# internal_error check and the success return are missing).
1197 def retrieve_key (filename, keyserver=None, keyring=None):
1199 Retrieve the key that signed 'filename' from 'keyserver' and
1200 add it to 'keyring'. Returns nothing on success, or an error message
1204 # Defaults for keyserver and keyring
1206 keyserver = Cnf["Dinstall::KeyServer"]
1208 keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
# Taint check: filename is later interpolated into a shell command line.
1210 # Ensure the filename contains no shell meta-characters or other badness
1211 if not re_taint_free.match(filename):
1212 return "%s: tainted filename" % (filename)
1214 # Invoke gpgv on the file
1215 status_read, status_write = os.pipe()
1216 cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1217 (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1219 # Process the status-fd output
1220 (keywords, internal_error) = process_gpgv_output(status)
1222 return internal_error
1224 if not keywords.has_key("NO_PUBKEY"):
1225 return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1227 fingerprint = keywords["NO_PUBKEY"][0]
1228 # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
1229 # it'll try to create a lockfile in /dev. A better solution might
1230 # be a tempfile or something.
1231 cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1232 % (Cnf["Dinstall::SigningKeyring"])
1233 cmd += " --keyring %s --keyserver %s --recv-key %s" \
1234 % (keyring, keyserver, fingerprint)
1235 (result, output) = commands.getstatusoutput(cmd)
1237 return "'%s' failed with exit code %s" % (cmd, result)
1241 ################################################################################
# Build the "--keyring X --keyring Y ..." argument string for gpg/gpgv,
# defaulting to the configured Dinstall::GPGKeyring list. Extract is partial
# (the `if not keyrings:` guard line is missing).
1243 def gpg_keyring_args(keyrings=None):
1245 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1247 return " ".join(["--keyring %s" % x for x in keyrings])
1249 ################################################################################
def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None):
    """
    Check the GPG signature on sig_filename (a signed file, or a detached
    signature when data_filename is given) by running gpgv and parsing its
    status-fd output.

    @type sig_filename: string
    @param sig_filename: name of the signed (or detached-signature) file

    @type data_filename: string
    @param data_filename: name of the file a detached signature applies to
        (empty string for an inline/clearsigned file)

    @type keyrings: list of strings (or None)
    @param keyrings: keyrings to check the signature against; falls back
        to the Dinstall::GPGKeyring configuration list

    @type autofetch: None, True or False
    @param autofetch: whether to try fetching an unknown signing key from
        a keyserver; None means use the Dinstall::KeyAutoFetch config default

    @rtype: tuple
    @return: (fingerprint, []) on success, or (None, rejects) where
        'rejects' is a list of human-readable reject messages
    """
    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(sig_filename):
        rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
        return (None, rejects)

    if data_filename and not re_taint_free.match(data_filename):
        rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
        return (None, rejects)

    # NOTE(review): 'rejects' is used above without a visible initialisation,
    # and this assignment clobbers any caller-supplied 'keyrings' -- a
    # "rejects = []" and an "if not keyrings:" guard appear to have been
    # lost here; confirm against revision history.
    keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    # Autofetch the signing key if that's enabled
    if autofetch == None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    # NOTE(review): as written the key is fetched and the function returns
    # unconditionally here, making the gpgv invocation below unreachable --
    # "if autofetch:" / "if error_msg:" guards appear to have been lost.
    error_msg = retrieve_key(sig_filename)
    rejects.append(error_msg)
    return (None, rejects)

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    # NOTE(review): list.append() takes exactly one argument -- the
    # two-argument calls below look like leftovers from an older
    # reject(msg, prefix) callback API and will raise TypeError if reached.
    rejects.append("internal error while performing signature check on %s." % (sig_filename))
    rejects.append(internal_error, "")
    rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.", "")
    return (None, rejects)

    # Now check for obviously bad things in the processed output
    if keywords.has_key("KEYREVOKED"):
        rejects.append("The key used to sign %s has been revoked." % (sig_filename))
    if keywords.has_key("BADSIG"):
        rejects.append("bad signature on %s." % (sig_filename))
    if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
        rejects.append("failed to check signature on %s." % (sig_filename))
    if keywords.has_key("NO_PUBKEY"):
        args = keywords["NO_PUBKEY"]
        # NOTE(review): 'key' is never assigned in the visible code -- a
        # "key = args[0]" extraction appears to have been lost.
        rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if keywords.has_key("BADARMOR"):
        rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if keywords.has_key("NODATA"):
        rejects.append("no signature found in %s." % (sig_filename))
    if keywords.has_key("EXPKEYSIG"):
        args = keywords["EXPKEYSIG"]
        # NOTE(review): same problem -- 'key' is not extracted from 'args'.
        rejects.append("Signature made by expired key 0x%s" % (key))
    if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
        args = keywords["KEYEXPIRED"]
        # NOTE(review): 'timestamp' is not extracted from 'args', and the
        # try/except/else around the strftime conversion is missing, so the
        # three 'expiredate' assignments run sequentially instead of as
        # alternatives -- confirm against revision history.
        if timestamp.count("T") == 0:
            # A timestamp without "T" is seconds since the epoch.
            expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
            expiredate = "unknown (%s)" % (timestamp)
        expiredate = timestamp
        rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))

    if len(rejects) > 0:
        return (None, rejects)

    # Next check gpgv exited with a zero return code
    # NOTE(review): an "if exit_status:" guard appears to have been lost --
    # as written this always rejects and returns here.
    rejects.append("gpgv failed while checking %s." % (sig_filename))
    rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
    rejects.append(prefix_multi_line_string(output, " [GPG output:] "), "")
    return (None, rejects)

    # Sanity check the good stuff we expect
    if not keywords.has_key("VALIDSIG"):
        rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
    args = keywords["VALIDSIG"]
    # NOTE(review): an emptiness check on 'args' (guarding this internal
    # error) appears to have been lost.
    rejects.append("internal error while checking signature on %s." % (sig_filename))
    fingerprint = args[0]
    if not keywords.has_key("GOODSIG"):
        rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if not keywords.has_key("SIG_ID"):
        rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

    # Finally ensure there's not something we don't recognise
    known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
                          NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")

    for keyword in keywords.keys():
        if not known_keywords.has_key(keyword):
            rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))

    if len(rejects) > 0:
        return (None, rejects)

    return (fingerprint, [])
1383 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """
    Retrieve email addresses from gpg key uids for a given fingerprint.

    @type fingerprint: string
    @param fingerprint: key fingerprint to look up

    @rtype: set of strings
    @return: email addresses found in the key's uids (possibly empty)
    """
    # Serve from cache when possible; the previous code dropped the early
    # return, so every call re-ran gpg even on a cache hit.
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses != None:
        return addresses
    addresses = set()
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
                % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    # Only trust the output if gpg succeeded, and only take lines that
    # actually match the uid pattern (m may be None otherwise).
    if result == 0:
        for l in output.split('\n'):
            m = re_gpg_uid.match(l)
            if m:
                addresses.add(m.group(1))
    # Cache even an empty/failed lookup so we don't shell out again.
    key_uid_email_cache[fingerprint] = addresses
    return addresses
1402 ################################################################################
1404 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """
    Greedily word-wrap 'paragraph' so that no output line exceeds
    'max_length' characters, except that a single word longer than
    'max_length' is emitted on a line of its own.  'prefix' is prepended
    to every line after the first (e.g. for indentation).

    @type paragraph: string
    @param paragraph: text to wrap (split on whitespace)

    @type max_length: int
    @param max_length: maximum length of an output line

    @type prefix: string
    @param prefix: continuation-line prefix

    @rtype: string
    @return: the wrapped text
    """
    # The previous code referenced 's', 'line' and 'word' without ever
    # initialising them and had no loop over 'words'; reconstructed here.
    s = ""
    line = ""
    words = paragraph.split()

    for word in words:
        word_size = len(word)
        if word_size > max_length:
            # Over-long word: flush the pending line, then emit the word
            # on a line of its own.
            if line:
                s += line + '\n' + prefix
            s += word + '\n' + prefix
            line = ""
        elif line:
            # +1 accounts for the joining space.
            new_length = len(line) + word_size + 1
            if new_length > max_length:
                # Word doesn't fit: flush and start a new line with it.
                s += line + '\n' + prefix
                line = word
            else:
                line += ' ' + word
        else:
            line = word

    if line:
        s += line

    return s
1435 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize the absolute symlink 'src' -> 'dest' with respect to 'root':
    strip 'root' from both paths, then climb out of dest's directory with
    enough '../' components to reach 'src'.

    @rtype: string
    @return: the relative symlink target
    """
    rel_src = src.replace(root, '', 1)
    rel_dest_dir = os.path.dirname(dest.replace(root, '', 1))
    # One "../" per path component of the (root-relative) link location.
    hops_up = '../' * len(rel_dest_dir.split('/'))
    return hops_up + rel_src
1448 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Securely pre-create a unique temporary file.

    @param directory: directory to create the file in (None = system default)
    @param prefix: leading part of the file name (default "dak")
    @param suffix: trailing part of the file name

    @rtype: tuple
    @return: (os-level file descriptor, absolute pathname), as returned
        by tempfile.mkstemp -- the caller must close/remove the file
    """
    return tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=directory)
1462 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Securely pre-create a unique temporary directory.

    @param parent: directory to create the new directory in (None = system default)
    @param prefix: leading part of the directory name (default "dak")
    @param suffix: trailing part of the directory name

    @rtype: string
    @return: pathname of the newly created directory, as returned by
        tempfile.mkdtemp -- the caller must remove it
    """
    return tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=parent)
1476 ################################################################################
def is_email_alias(email):
    """
    Check whether the local part (before '@') of 'email' is listed as an
    alias name in the system alias file.

    @type email: string
    @param email: email address to check

    @rtype: boolean
    @return: True if the user part appears in the alias file
    """
    global alias_cache
    # Lazily build the cache on first use.  The previous code neither
    # declared the global nor initialised the set, so .add() below was
    # called on None.
    if alias_cache == None:
        aliasfn = which_alias_file()
        alias_cache = set()
        if aliasfn:
            # Alias file format is "name: target[, target...]"; we only
            # need the name on the left of the colon.
            for l in open(aliasfn):
                alias_cache.add(l.split(':')[0])
    uid = email.split('@')[0]
    return uid in alias_cache
1490 ################################################################################
def get_changes_files(dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place).

    @type dir: string
    @param dir: directory to scan

    @rtype: list of strings
    @return: filenames (not paths) of the .changes files found
    """
    try:
        # Much of the rest of p-u/p-a depends on being in the right place
        os.chdir(dir)
        changes_files = [x for x in os.listdir(dir) if x.endswith('.changes')]
    except OSError as e:
        # Previous code called fubar() unconditionally with an undefined
        # 'e'; only bail out when the directory really can't be read.
        fubar("Failed to read list from directory %s (%s)" % (dir, e))

    return changes_files
1509 ################################################################################
# Module-level initialisation of the dak configuration: start from the
# default config file (default_config, /etc/dak/dak.conf -- see the module
# constants above) and layer a host-specific file on top if one applies.
Cnf = apt_pkg.newConfiguration()
apt_pkg.ReadConfigFileISC(Cnf,default_config)

# which_conf_file() presumably selects a per-host/per-invocation config
# file -- TODO confirm; when it differs from the default, read it as well
# so its settings override the defaults.
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1519 ###############################################################################