2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
41 import email as modemail
43 from dbconn import DBConn, get_architecture, get_component, get_suite
44 from dak_exceptions import *
45 from textutils import fix_maintainer
46 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
47 re_multi_line_field, re_srchasver, re_verwithext, \
48 re_parse_maintainer, re_taint_free, re_gpg_uid, \
49 re_re_mark, re_whitespace_comment, re_issource
51 from srcformats import srcformats
52 from collections import defaultdict
54 ################################################################################
56 default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
57 default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used
59 alias_cache = None #: Cache for email alias checks
60 key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
62 # (hashname, function, earliest_changes_version)
63 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
64 ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
66 ################################################################################
69 """ Escape html chars """
70 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
72 ################################################################################
def open_file(filename, mode='r'):
    """
    Open C{filename}, return fileobject.

    @type filename: string
    @param filename: path/filename to open

    @type mode: string
    @param mode: open mode

    @rtype: fileobject
    @return: open fileobject

    @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
    """
    try:
        f = open(filename, mode)
    except IOError:
        # Use the call form of raise (valid in both Python 2 and 3)
        # instead of the old "raise Exc, arg" statement form.
        raise CantOpenError(filename)
    return f
96 ################################################################################
98 def our_raw_input(prompt=""):
100 sys.stdout.write(prompt)
106 sys.stderr.write("\nUser interrupt (^D).\n")
109 ################################################################################
111 def extract_component_from_section(section):
114 if section.find('/') != -1:
115 component = section.split('/')[0]
117 # Expand default component
119 if Cnf.has_key("Component::%s" % section):
124 return (section, component)
126 ################################################################################
128 def parse_deb822(contents, signing_rules=0):
132 # Split the lines in the input, keeping the linebreaks.
133 lines = contents.splitlines(True)
136 raise ParseChangesError, "[Empty changes file]"
138 # Reindex by line number so we can easily verify the format of
144 indexed_lines[index] = line[:-1]
148 num_of_lines = len(indexed_lines.keys())
151 while index < num_of_lines:
153 line = indexed_lines[index]
155 if signing_rules == 1:
157 if index > num_of_lines:
158 raise InvalidDscError, index
159 line = indexed_lines[index]
160 if not line.startswith("-----BEGIN PGP SIGNATURE"):
161 raise InvalidDscError, index
166 if line.startswith("-----BEGIN PGP SIGNATURE"):
168 if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
170 if signing_rules == 1:
171 while index < num_of_lines and line != "":
173 line = indexed_lines[index]
175 # If we're not inside the signed data, don't process anything
176 if signing_rules >= 0 and not inside_signature:
178 slf = re_single_line_field.match(line)
180 field = slf.groups()[0].lower()
181 changes[field] = slf.groups()[1]
185 changes[field] += '\n'
187 mlf = re_multi_line_field.match(line)
190 raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
191 if first == 1 and changes[field] != "":
192 changes[field] += '\n'
194 changes[field] += mlf.groups()[0] + '\n'
198 if signing_rules == 1 and inside_signature:
199 raise InvalidDscError, index
201 changes["filecontents"] = "".join(lines)
203 if changes.has_key("source"):
204 # Strip the source version in brackets from the source field,
205 # put it in the "source-version" field instead.
206 srcver = re_srchasver.search(changes["source"])
208 changes["source"] = srcver.group(1)
209 changes["source-version"] = srcver.group(2)
212 raise ParseChangesError, error
216 ################################################################################
218 def parse_changes(filename, signing_rules=0):
220 Parses a changes file and returns a dictionary where each field is a
221 key. The mandatory first argument is the filename of the .changes
224 signing_rules is an optional argument:
226 - If signing_rules == -1, no signature is required.
227 - If signing_rules == 0 (the default), a signature is required.
228 - If signing_rules == 1, it turns on the same strict format checking
231 The rules for (signing_rules == 1)-mode are:
233 - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
234 followed by any PGP header data and must end with a blank line.
236 - The data section must end with a blank line and must be followed by
237 "-----BEGIN PGP SIGNATURE-----".
240 changes_in = open_file(filename)
241 content = changes_in.read()
244 unicode(content, 'utf-8')
246 raise ChangesUnicodeError, "Changes file not proper utf-8"
247 return parse_deb822(content, signing_rules)
249 ################################################################################
def hash_key(hashname):
    """Return the files-dict key under which the given hash is stored,
    e.g. "md5" -> "md5sum"."""
    return "{0}sum".format(hashname)
254 ################################################################################
256 def create_hash(where, files, hashname, hashfunc):
258 create_hash extends the passed files dict with the given hash by
259 iterating over all files on disk and passing them to the hashing
264 for f in files.keys():
266 file_handle = open_file(f)
267 except CantOpenError:
268 rejmsg.append("Could not open file %s for checksumming" % (f))
271 files[f][hash_key(hashname)] = hashfunc(file_handle)
276 ################################################################################
278 def check_hash(where, files, hashname, hashfunc):
280 check_hash checks the given hash in the files dict against the actual
281 files on disk. The hash values need to be present consistently in
282 all file entries. It does not modify its input in any way.
286 for f in files.keys():
290 file_handle = open_file(f)
292 # Check for the hash entry, to not trigger a KeyError.
293 if not files[f].has_key(hash_key(hashname)):
294 rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
298 # Actually check the hash for correctness.
299 if hashfunc(file_handle) != files[f][hash_key(hashname)]:
300 rejmsg.append("%s: %s check failed in %s" % (f, hashname,
302 except CantOpenError:
303 # TODO: This happens when the file is in the pool.
304 # warn("Cannot open file %s" % f)
311 ################################################################################
313 def check_size(where, files):
315 check_size checks the file sizes in the passed files dict against the
320 for f in files.keys():
325 # TODO: This happens when the file is in the pool.
329 actual_size = entry[stat.ST_SIZE]
330 size = int(files[f]["size"])
331 if size != actual_size:
332 rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
333 % (f, actual_size, size, where))
336 ################################################################################
338 def check_dsc_files(dsc_filename, dsc=None, dsc_files=None):
340 Verify that the files listed in the Files field of the .dsc are
341 those expected given the announced Format.
343 @type dsc_filename: string
344 @param dsc_filename: path of .dsc file
347 @param dsc: the content of the .dsc parsed by C{parse_changes()}
349 @type dsc_files: dict
350 @param dsc_files: the file list returned by C{build_file_list()}
353 @return: all errors detected
357 # Parse the file if needed
359 dsc = parse_changes(dsc_filename, signing_rules=1);
361 if dsc_files is None:
362 dsc_files = build_file_list(dsc, is_a_dsc=1)
364 # Ensure .dsc lists proper set of source files according to the format
366 has = defaultdict(lambda: 0)
369 (r'orig.tar.gz', ('orig_tar_gz', 'orig_tar')),
370 (r'diff.gz', ('debian_diff',)),
371 (r'tar.gz', ('native_tar_gz', 'native_tar')),
372 (r'debian\.tar\.(gz|bz2)', ('debian_tar',)),
373 (r'orig\.tar\.(gz|bz2)', ('orig_tar',)),
374 (r'tar\.(gz|bz2)', ('native_tar',)),
375 (r'orig-.+\.tar\.(gz|bz2)', ('more_orig_tar',)),
378 for f in dsc_files.keys():
379 m = re_issource.match(f)
381 rejmsg.append("%s: %s in Files field not recognised as source."
385 # Populate 'has' dictionary by resolving keys in lookup table
387 for regex, keys in ftype_lookup:
388 if re.match(regex, m.group(3)):
394 # File does not match anything in lookup table; reject
396 reject("%s: unexpected source file '%s'" % (dsc_filename, f))
398 # Check for multiple files
399 for file_type in ('orig_tar', 'native_tar', 'debian_tar', 'debian_diff'):
400 if has[file_type] > 1:
401 rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))
403 # Source format specific tests
404 for format in srcformats:
405 if format.re_format.match(dsc['format']):
407 '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)
413 ################################################################################
415 def check_hash_fields(what, manifest):
417 check_hash_fields ensures that there are no checksum fields in the
418 given dict that we do not know about.
422 hashes = map(lambda x: x[0], known_hashes)
423 for field in manifest:
424 if field.startswith("checksums-"):
425 hashname = field.split("-",1)[1]
426 if hashname not in hashes:
427 rejmsg.append("Unsupported checksum field for %s "\
428 "in %s" % (hashname, what))
431 ################################################################################
433 def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
434 if format >= version:
435 # The version should contain the specified hash.
438 # Import hashes from the changes
439 rejmsg = parse_checksums(".changes", files, changes, hashname)
443 # We need to calculate the hash because it can't possibly
446 return func(".changes", files, hashname, hashfunc)
448 # We could add the orig which might be in the pool to the files dict to
449 # access the checksums easily.
451 def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
453 ensure_dsc_hashes' task is to ensure that each and every *present* hash
454 in the dsc is correct, i.e. identical to the changes file and if necessary
455 the pool. The latter task is delegated to check_hash.
459 if not dsc.has_key('Checksums-%s' % (hashname,)):
461 # Import hashes from the dsc
462 parse_checksums(".dsc", dsc_files, dsc, hashname)
464 rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
467 ################################################################################
469 def parse_checksums(where, files, manifest, hashname):
471 field = 'checksums-%s' % hashname
472 if not field in manifest:
474 for line in manifest[field].split('\n'):
477 clist = line.strip().split(' ')
479 checksum, size, checkfile = clist
481 rejmsg.append("Cannot parse checksum line [%s]" % (line))
483 if not files.has_key(checkfile):
484 # TODO: check for the file's entry in the original files dict, not
485 # the one modified by (auto)byhand and other weird stuff
486 # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
487 # (file, hashname, where))
489 if not files[checkfile]["size"] == size:
490 rejmsg.append("%s: size differs for files and checksums-%s entry "\
491 "in %s" % (checkfile, hashname, where))
493 files[checkfile][hash_key(hashname)] = checksum
494 for f in files.keys():
495 if not files[f].has_key(hash_key(hashname)):
496 rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
500 ################################################################################
502 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
504 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
507 # Make sure we have a Files: field to parse...
508 if not changes.has_key(field):
509 raise NoFilesFieldError
511 # Make sure we recognise the format of the Files: field
512 format = re_verwithext.search(changes.get("format", "0.0"))
514 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
516 format = format.groups()
517 if format[1] == None:
518 format = int(float(format[0])), 0, format[2]
520 format = int(format[0]), int(format[1]), format[2]
521 if format[2] == None:
525 # format = (0,0) are missing format headers of which we still
526 # have some in the archive.
527 if format != (1,0) and format != (0,0) and \
528 format != (3,0,"quilt") and format != (3,0,"native"):
529 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
531 if (format < (1,5) or format > (1,8)):
532 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
533 if field != "files" and format < (1,8):
534 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
536 includes_section = (not is_a_dsc) and field == "files"
538 # Parse each entry/line:
539 for i in changes[field].split('\n'):
543 section = priority = ""
546 (md5, size, section, priority, name) = s
548 (md5, size, name) = s
550 raise ParseChangesError, i
557 (section, component) = extract_component_from_section(section)
559 files[name] = Dict(size=size, section=section,
560 priority=priority, component=component)
561 files[name][hashname] = md5
565 ################################################################################
567 def send_mail (message, filename=""):
568 """sendmail wrapper, takes _either_ a message string or a file as arguments"""
570 # If we've been passed a string dump it into a temporary file
572 (fd, filename) = tempfile.mkstemp()
573 os.write (fd, message)
576 if Cnf.has_key("Dinstall::MailWhiteList") and \
577 Cnf["Dinstall::MailWhiteList"] != "":
578 message_in = open_file(filename)
579 message_raw = modemail.message_from_file(message_in)
583 whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
585 for line in whitelist_in:
586 if not re_whitespace_comment.match(line):
587 if re_re_mark.match(line):
588 whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
590 whitelist.append(re.compile(re.escape(line.strip())))
595 fields = ["To", "Bcc", "Cc"]
598 value = message_raw.get(field, None)
601 for item in value.split(","):
602 (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
608 if not mail_whitelisted:
609 print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])
613 # Doesn't have any mail in whitelist so remove the header
615 del message_raw[field]
617 message_raw.replace_header(field, string.join(match, ", "))
619 # Change message fields in order if we don't have a To header
620 if not message_raw.has_key("To"):
623 if message_raw.has_key(field):
624 message_raw[fields[-1]] = message_raw[field]
625 del message_raw[field]
628 # Clean up any temporary files
629 # and return, as we removed all recipients.
631 os.unlink (filename);
634 fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
635 os.write (fd, message_raw.as_string(True));
639 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
641 raise SendmailFailedError, output
643 # Clean up any temporary files
647 ################################################################################
649 def poolify (source, component):
652 if source[:3] == "lib":
653 return component + source[:4] + '/' + source + '/'
655 return component + source[:1] + '/' + source + '/'
657 ################################################################################
659 def move (src, dest, overwrite = 0, perms = 0664):
660 if os.path.exists(dest) and os.path.isdir(dest):
663 dest_dir = os.path.dirname(dest)
664 if not os.path.exists(dest_dir):
665 umask = os.umask(00000)
666 os.makedirs(dest_dir, 02775)
668 #print "Moving %s to %s..." % (src, dest)
669 if os.path.exists(dest) and os.path.isdir(dest):
670 dest += '/' + os.path.basename(src)
671 # Don't overwrite unless forced to
672 if os.path.exists(dest):
674 fubar("Can't move %s to %s - file already exists." % (src, dest))
676 if not os.access(dest, os.W_OK):
677 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
678 shutil.copy2(src, dest)
679 os.chmod(dest, perms)
682 def copy (src, dest, overwrite = 0, perms = 0664):
683 if os.path.exists(dest) and os.path.isdir(dest):
686 dest_dir = os.path.dirname(dest)
687 if not os.path.exists(dest_dir):
688 umask = os.umask(00000)
689 os.makedirs(dest_dir, 02775)
691 #print "Copying %s to %s..." % (src, dest)
692 if os.path.exists(dest) and os.path.isdir(dest):
693 dest += '/' + os.path.basename(src)
694 # Don't overwrite unless forced to
695 if os.path.exists(dest):
697 raise FileExistsError
699 if not os.access(dest, os.W_OK):
700 raise CantOverwriteError
701 shutil.copy2(src, dest)
702 os.chmod(dest, perms)
704 ################################################################################
707 res = socket.gethostbyaddr(socket.gethostname())
708 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
709 if database_hostname:
710 return database_hostname
def which_conf_file ():
    """
    Return the path of the dak config file to use for this host.

    Honours a per-host "Config::<host>::DakConfig" entry; when
    "Config::<host>::AllowLocalConfig" is enabled, a per-user config
    file under $HOME is loaded first if present.
    """
    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # BUGFIX: os.path.join discards every component before an
        # absolute one, so join(homedir, "/etc/dak.conf") always yielded
        # plain "/etc/dak.conf".  The per-user path must be relative to
        # $HOME (and $HOME may be unset).
        if homedir:
            confpath = os.path.join(homedir, "etc/dak.conf")
            if os.path.exists(confpath):
                # BUGFIX: read the local config file that was found,
                # not default_config again.
                apt_pkg.ReadConfigFileISC(Cnf, confpath)

    # We are still in here, so there is no local config file or we do
    # not allow local files. Do the normal stuff.
    if Cnf.get("Config::" + res[0] + "::DakConfig"):
        return Cnf["Config::" + res[0] + "::DakConfig"]

    return default_config
def which_apt_conf_file ():
    """
    Return the path of the apt config file to use for this host.

    Honours a per-host "Config::<host>::AptConfig" entry; when
    "Config::<host>::AllowLocalConfig" is enabled, a per-user config
    file under $HOME is loaded first if present.
    """
    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # BUGFIX: os.path.join discards every component before an
        # absolute one, so join(homedir, "/etc/dak.conf") always yielded
        # plain "/etc/dak.conf".  The per-user path must be relative to
        # $HOME (and $HOME may be unset).
        if homedir:
            confpath = os.path.join(homedir, "etc/dak.conf")
            if os.path.exists(confpath):
                # BUGFIX: read the local config file that was found,
                # not default_config again.
                apt_pkg.ReadConfigFileISC(Cnf, confpath)

    if Cnf.get("Config::" + res[0] + "::AptConfig"):
        return Cnf["Config::" + res[0] + "::AptConfig"]

    return default_apt_config
744 def which_alias_file():
745 hostname = socket.gethostbyaddr(socket.gethostname())[0]
746 aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
747 if os.path.exists(aliasfn):
752 ################################################################################
754 def TemplateSubst(map, filename):
755 """ Perform a substition of template """
756 templatefile = open_file(filename)
757 template = templatefile.read()
759 template = template.replace(x, str(map[x]))
763 ################################################################################
def fubar(msg, exit_code=1):
    """Print an error message ("E: <msg>") to stderr and terminate the
    process with the given exit code (default 1)."""
    sys.stderr.write("E: %s\n" % (msg))
    sys.exit(exit_code)
770 sys.stderr.write("W: %s\n" % (msg))
772 ################################################################################
774 # Returns the user name with a laughable attempt at rfc822 conformancy
775 # (read: removing stray periods).
777 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
780 return pwd.getpwuid(os.getuid())[0]
782 ################################################################################
792 return ("%d%s" % (c, t))
794 ################################################################################
796 def cc_fix_changes (changes):
797 o = changes.get("architecture", "")
799 del changes["architecture"]
800 changes["architecture"] = {}
802 changes["architecture"][j] = 1
804 def changes_compare (a, b):
805 """ Sort by source name, source version, 'have source', and then by filename """
807 a_changes = parse_changes(a)
812 b_changes = parse_changes(b)
816 cc_fix_changes (a_changes)
817 cc_fix_changes (b_changes)
819 # Sort by source name
820 a_source = a_changes.get("source")
821 b_source = b_changes.get("source")
822 q = cmp (a_source, b_source)
826 # Sort by source version
827 a_version = a_changes.get("version", "0")
828 b_version = b_changes.get("version", "0")
829 q = apt_pkg.VersionCompare(a_version, b_version)
833 # Sort by 'have source'
834 a_has_source = a_changes["architecture"].get("source")
835 b_has_source = b_changes["architecture"].get("source")
836 if a_has_source and not b_has_source:
838 elif b_has_source and not a_has_source:
841 # Fall back to sort by filename
844 ################################################################################
846 def find_next_free (dest, too_many=100):
849 while os.path.exists(dest) and extra < too_many:
850 dest = orig_dest + '.' + repr(extra)
852 if extra >= too_many:
853 raise NoFreeFilenameError
856 ################################################################################
def result_join (original, sep = '\t'):
    """
    Join the elements of *original* with *sep*, substituting the empty
    string for any element that is None (so database NULLs render as
    empty columns).

    @type original: sequence of strings (or None)
    @param original: values to join

    @type sep: string
    @param sep: separator, defaults to a tab

    @rtype: string
    @return: the joined result
    """
    # Use "is None" (identity), not "== None", and iterate directly
    # instead of a Python-2-only xrange() index loop.
    resultlist = []
    for item in original:
        if item is None:
            resultlist.append("")
        else:
            resultlist.append(item)
    return sep.join(resultlist)
867 ################################################################################
869 def prefix_multi_line_string(str, prefix, include_blank_lines=0):
871 for line in str.split('\n'):
873 if line or include_blank_lines:
874 out += "%s%s\n" % (prefix, line)
875 # Strip trailing new line
880 ################################################################################
882 def validate_changes_file_arg(filename, require_changes=1):
884 'filename' is either a .changes or .dak file. If 'filename' is a
885 .dak file, it's changed to be the corresponding .changes file. The
886 function then checks if the .changes file a) exists and b) is
887 readable and returns the .changes filename if so. If there's a
888 problem, the next action depends on the option 'require_changes'
891 - If 'require_changes' == -1, errors are ignored and the .changes
892 filename is returned.
893 - If 'require_changes' == 0, a warning is given and 'None' is returned.
894 - If 'require_changes' == 1, a fatal error is raised.
899 orig_filename = filename
900 if filename.endswith(".dak"):
901 filename = filename[:-4]+".changes"
903 if not filename.endswith(".changes"):
904 error = "invalid file type; not a changes file"
906 if not os.access(filename,os.R_OK):
907 if os.path.exists(filename):
908 error = "permission denied"
910 error = "file not found"
913 if require_changes == 1:
914 fubar("%s: %s." % (orig_filename, error))
915 elif require_changes == 0:
916 warn("Skipping %s - %s" % (orig_filename, error))
918 else: # We only care about the .dak file
923 ################################################################################
926 return (arch != "source" and arch != "all")
928 ################################################################################
def join_with_commas_and(list):
    """Render a list for humans: "nothing" when empty, the sole element
    when singular, otherwise "a, b and c"."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    head, last = list[:-1], list[-1]
    return "%s and %s" % (", ".join(head), last)
935 ################################################################################
940 (pkg, version, constraint) = atom
942 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
945 pp_deps.append(pp_dep)
946 return " |".join(pp_deps)
948 ################################################################################
953 ################################################################################
955 def parse_args(Options):
956 """ Handle -a, -c and -s arguments; returns them as SQL constraints """
957 # XXX: This should go away and everything which calls it be converted
958 # to use SQLA properly. For now, we'll just fix it not to use
959 # the old Pg interface though
960 session = DBConn().session()
964 for suitename in split_args(Options["Suite"]):
965 suite = get_suite(suitename, session=session)
966 if suite.suite_id is None:
967 warn("suite '%s' not recognised." % (suite.suite_name))
969 suite_ids_list.append(suite.suite_id)
971 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
973 fubar("No valid suite given.")
978 if Options["Component"]:
979 component_ids_list = []
980 for componentname in split_args(Options["Component"]):
981 component = get_component(componentname, session=session)
982 if component is None:
983 warn("component '%s' not recognised." % (componentname))
985 component_ids_list.append(component.component_id)
986 if component_ids_list:
987 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
989 fubar("No valid component given.")
993 # Process architecture
994 con_architectures = ""
996 if Options["Architecture"]:
998 for archname in split_args(Options["Architecture"]):
999 if archname == "source":
1002 arch = get_architecture(archname, session=session)
1004 warn("architecture '%s' not recognised." % (archname))
1006 arch_ids_list.append(arch.arch_id)
1008 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
1010 if not check_source:
1011 fubar("No valid architecture given.")
1015 return (con_suites, con_architectures, con_components, check_source)
1017 ################################################################################
1019 # Inspired(tm) by Bryn Keller's print_exc_plus (See
1020 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
1023 tb = sys.exc_info()[2]
1030 frame = frame.f_back
1032 traceback.print_exc()
1034 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
1035 frame.f_code.co_filename,
1037 for key, value in frame.f_locals.items():
1038 print "\t%20s = " % key,
1042 print "<unable to print>"
1044 ################################################################################
1046 def try_with_debug(function):
1054 ################################################################################
1056 def arch_compare_sw (a, b):
1058 Function for use in sorting lists of architectures.
1060 Sorts normally except that 'source' dominates all others.
1063 if a == "source" and b == "source":
1072 ################################################################################
1074 def split_args (s, dwim=1):
1076 Split command line arguments which can be separated by either commas
1077 or whitespace. If dwim is set, it will complain about string ending
1078 in comma since this usually means someone did 'dak ls -a i386, m68k
1079 foo' or something and the inevitable confusion resulting from 'm68k'
1080 being treated as an argument is undesirable.
1083 if s.find(",") == -1:
1086 if s[-1:] == "," and dwim:
1087 fubar("split_args: found trailing comma, spurious space maybe?")
1090 ################################################################################
def Dict(**kwargs):
    """Return the keyword arguments as a plain dictionary.  (The
    catch-all is named kwargs rather than the original "dict" so the
    builtin is not shadowed; callers pass keywords only, so the rename
    is invisible to them.)"""
    return kwargs
1094 ########################################
1096 def gpgv_get_status_output(cmd, status_read, status_write):
1098 Our very own version of commands.getouputstatus(), hacked to support
1102 cmd = ['/bin/sh', '-c', cmd]
1103 p2cread, p2cwrite = os.pipe()
1104 c2pread, c2pwrite = os.pipe()
1105 errout, errin = os.pipe()
1115 for i in range(3, 256):
1116 if i != status_write:
1122 os.execvp(cmd[0], cmd)
1128 os.dup2(c2pread, c2pwrite)
1129 os.dup2(errout, errin)
1131 output = status = ""
1133 i, o, e = select.select([c2pwrite, errin, status_read], [], [])
1136 r = os.read(fd, 8196)
1138 more_data.append(fd)
1139 if fd == c2pwrite or fd == errin:
1141 elif fd == status_read:
1144 fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
1146 pid, exit_status = os.waitpid(pid, 0)
1148 os.close(status_write)
1149 os.close(status_read)
1159 return output, status, exit_status
1161 ################################################################################
1163 def process_gpgv_output(status):
1164 # Process the status-fd output
1167 for line in status.split('\n'):
1171 split = line.split()
1173 internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
1175 (gnupg, keyword) = split[:2]
1176 if gnupg != "[GNUPG:]":
1177 internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
1180 if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
1181 internal_error += "found duplicate status token ('%s').\n" % (keyword)
1184 keywords[keyword] = args
1186 return (keywords, internal_error)
1188 ################################################################################
1190 def retrieve_key (filename, keyserver=None, keyring=None):
1192 Retrieve the key that signed 'filename' from 'keyserver' and
1193 add it to 'keyring'. Returns nothing on success, or an error message
1197 # Defaults for keyserver and keyring
1199 keyserver = Cnf["Dinstall::KeyServer"]
1201 keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
1203 # Ensure the filename contains no shell meta-characters or other badness
1204 if not re_taint_free.match(filename):
1205 return "%s: tainted filename" % (filename)
1207 # Invoke gpgv on the file
1208 status_read, status_write = os.pipe()
1209 cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1210 (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1212 # Process the status-fd output
1213 (keywords, internal_error) = process_gpgv_output(status)
1215 return internal_error
1217 if not keywords.has_key("NO_PUBKEY"):
1218 return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1220 fingerprint = keywords["NO_PUBKEY"][0]
1221 # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
1222 # it'll try to create a lockfile in /dev. A better solution might
1223 # be a tempfile or something.
1224 cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1225 % (Cnf["Dinstall::SigningKeyring"])
1226 cmd += " --keyring %s --keyserver %s --recv-key %s" \
1227 % (keyring, keyserver, fingerprint)
1228 (result, output) = commands.getstatusoutput(cmd)
1230 return "'%s' failed with exit code %s" % (cmd, result)
1234 ################################################################################
1236 def gpg_keyring_args(keyrings=None):
1238 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1240 return " ".join(["--keyring %s" % x for x in keyrings])
1242 ################################################################################
def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None):
    """
    Check the signature of a file and return the fingerprint if the
    signature is valid or 'None' if it's not. The first argument is the
    filename whose signature should be checked. The second argument is a
    reject function and is called when an error is found. The reject()
    function must allow for two arguments: the first is the error message,
    the second is an optional prefix string. It's possible for reject()
    to be called more than once during an invocation of check_signature().
    The third argument is optional and is the name of the files the
    detached signature applies to. The fourth argument is optional and is
    a *list* of keyrings to use. 'autofetch' can either be None, True or
    False. If None, the default behaviour specified in the config will be
    """
    # NOTE(review): several interior lines of this function (the 'rejects'
    # list initialisation and a number of 'if' guards) are not visible in
    # this excerpt; the statements below are kept verbatim -- verify the
    # complete body against VCS before relying on control flow here.

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(sig_filename):
        rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
        return (None, rejects)

    if data_filename and not re_taint_free.match(data_filename):
        rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
        return (None, rejects)

    # NOTE(review): presumably guarded by "if not keyrings:" -- as shown,
    # the caller-supplied keyrings would be clobbered; confirm.
    keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    # Autofetch the signing key if that's enabled
    if autofetch == None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    # NOTE(review): retrieve_key() call is presumably conditional on
    # 'autofetch' and the append/return on a non-empty error_msg.
    error_msg = retrieve_key(sig_filename)
    rejects.append(error_msg)
    return (None, rejects)

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    # NOTE(review): the "if internal_error:" guard is not visible here.
    rejects.append("internal error while performing signature check on %s." % (sig_filename))
    rejects.append(internal_error, "")
    rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.", "")
    return (None, rejects)

    # Now check for obviously bad things in the processed output
    if keywords.has_key("KEYREVOKED"):
        rejects.append("The key used to sign %s has been revoked." % (sig_filename))
    if keywords.has_key("BADSIG"):
        rejects.append("bad signature on %s." % (sig_filename))
    if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
        rejects.append("failed to check signature on %s." % (sig_filename))
    if keywords.has_key("NO_PUBKEY"):
        args = keywords["NO_PUBKEY"]
        # NOTE(review): extraction of 'key' from 'args' is not visible here.
        rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if keywords.has_key("BADARMOR"):
        rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if keywords.has_key("NODATA"):
        rejects.append("no signature found in %s." % (sig_filename))
    if keywords.has_key("EXPKEYSIG"):
        args = keywords["EXPKEYSIG"]
        # NOTE(review): extraction of 'key' from 'args' is not visible here.
        rejects.append("Signature made by expired key 0x%s" % (key))
    if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
        args = keywords["KEYEXPIRED"]
        # NOTE(review): extraction of 'timestamp' from 'args', and the
        # try/except + else branches around the conversions below, are
        # not visible in this excerpt.
        if timestamp.count("T") == 0:
            # An all-numeric timestamp is seconds since the epoch
            expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
            expiredate = "unknown (%s)" % (timestamp)
        expiredate = timestamp
        rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))

    if len(rejects) > 0:
        return (None, rejects)

    # Next check gpgv exited with a zero return code
    # NOTE(review): the "if exit_status:" guard is not visible here.
    rejects.append("gpgv failed while checking %s." % (sig_filename))
    rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
    rejects.append(prefix_multi_line_string(output, " [GPG output:] "), "")
    return (None, rejects)

    # Sanity check the good stuff we expect
    if not keywords.has_key("VALIDSIG"):
        rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
    args = keywords["VALIDSIG"]
    # NOTE(review): a length check on 'args' presumably guards the
    # internal-error append and the fingerprint extraction below.
    rejects.append("internal error while checking signature on %s." % (sig_filename))
    fingerprint = args[0]
    if not keywords.has_key("GOODSIG"):
        rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if not keywords.has_key("SIG_ID"):
        rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

    # Finally ensure there's not something we don't recognise
    known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
                          NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")

    for keyword in keywords.keys():
        if not known_keywords.has_key(keyword):
            rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))

    if len(rejects) > 0:
        return (None, rejects)

    return (fingerprint, [])
1376 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """Retrieve email addresses from gpg key uids for a given fingerprint."""
    # Serve repeated lookups from the module-level cache.
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses != None:
        # NOTE(review): the cache-hit return and the "addresses = set()"
        # initialisation are not visible in this excerpt -- verify in VCS.
    # Ask gpg to print the key; uids are parsed out of its output below.
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
          % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    # NOTE(review): presumably guarded by "if result == 0:".
    for l in output.split('\n'):
        m = re_gpg_uid.match(l)
        # NOTE(review): presumably guarded by "if m:".
        addresses.add(m.group(1))
    # Remember the (possibly empty) result so gpg is only invoked once
    # per fingerprint.
    key_uid_email_cache[fingerprint] = addresses
1395 ################################################################################
1397 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    # Greedy word-wrapper: split 'paragraph' into lines of at most
    # 'max_length' characters, prefixing continuation lines with 'prefix'.
    # NOTE(review): several lines of this function (accumulator setup,
    # the word-loop header, the trailing flush and the return) are not
    # visible in this excerpt; statements below are kept verbatim.
    words = paragraph.split()
    word_size = len(word)
    # A word longer than the limit gets a line of its own.
    if word_size > max_length:
        s += line + '\n' + prefix
        s += word + '\n' + prefix
    # +1 accounts for the joining space.
    new_length = len(line) + word_size + 1
    if new_length > max_length:
        s += line + '\n' + prefix
1428 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.

    @param src: absolute path the symlink points at
    @param dest: absolute path of the symlink itself
    @param root: common root prefix to strip from both paths

    @rtype: str
    @return: 'src' rewritten as a path relative to dest's directory
    """
    # Fix: the docstring text above was a bare statement (missing triple
    # quotes), which is a syntax error; restored as a proper docstring.
    # Strip the root prefix once from each path (first occurrence only).
    src = src.replace(root, '', 1)
    dest = dest.replace(root, '', 1)
    dest = os.path.dirname(dest)
    # Climb one level per directory component of dest's directory.
    new_src = '../' * len(dest.split('/'))
    return new_src + src
1441 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.
    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pair (fd, name).
    """
    # Fix: the docstring text above was a bare statement (missing triple
    # quotes), which is a syntax error; restored as a proper docstring.
    # mkstemp's argument order is (suffix, prefix, dir).
    return tempfile.mkstemp(suffix, prefix, directory)
1455 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Return a secure and unique directory by pre-creating it.
    If 'parent' is non-null, it will be the directory the directory is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pathname to the new directory
    """
    # Fix: the docstring text above was a bare statement (missing triple
    # quotes), which is a syntax error; restored as a proper docstring.
    # mkdtemp's argument order is (suffix, prefix, dir).
    return tempfile.mkdtemp(suffix, prefix, parent)
1469 ################################################################################
def is_email_alias(email):
    """ checks if the user part of the email is listed in the alias file """
    # NOTE(review): the "global alias_cache" declaration and the
    # "alias_cache = set()" initialisation are not visible in this
    # excerpt -- verify in VCS.
    # Populate the cache lazily on first use.
    if alias_cache == None:
        aliasfn = which_alias_file()
        # Alias files are colon-separated; the first field is the alias name.
        for l in open(aliasfn):
            alias_cache.add(l.split(':')[0])
    # Compare only the local part (before '@') against the cached aliases.
    uid = email.split('@')[0]
    return uid in alias_cache
1483 ################################################################################
def get_changes_files(dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place)

    Returns a list of filenames
    """
    # NOTE(review): the try/except wrapper and the os.chdir(dir) call are
    # not visible in this excerpt; as shown, 'e' in the fubar() call below
    # is unbound -- verify the full body in VCS.
    # Much of the rest of p-u/p-a depends on being in the right place
    changes_files = [x for x in os.listdir(dir) if x.endswith('.changes')]
    fubar("Failed to read list from directory %s (%s)" % (dir, e))
    return changes_files
1502 ################################################################################
# Module-level configuration bootstrap: load the default dak config into
# a fresh apt_pkg configuration object at import time.
# NOTE(review): an apt_pkg.init() call presumably precedes this -- not
# visible in this excerpt.
Cnf = apt_pkg.newConfiguration()
apt_pkg.ReadConfigFileISC(Cnf,default_config)

# Layer host-specific settings on top when a non-default config file is
# selected (e.g. via the environment); later reads override earlier ones.
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1512 ###############################################################################