2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
39 import email as modemail
42 from dbconn import DBConn, get_architecture, get_component, get_suite
43 from dak_exceptions import *
44 from textutils import fix_maintainer
45 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
46 re_multi_line_field, re_srchasver, re_taint_free, \
47 re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource, \
50 from formats import parse_format, validate_changes_format
51 from srcformats import get_format_from_string
52 from collections import defaultdict
54 ################################################################################
# Module-level defaults and caches: dak/apt configuration file paths,
# per-process caches for email handling, and the checksum algorithms
# accepted for entries in .changes/.dsc files.
56 default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
57 default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used
59 alias_cache = None #: Cache for email alias checks
60 key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
62 # (hashname, function, earliest_changes_version)
# Each tuple: (hash name, apt_pkg hashing function, earliest .changes
# Format version that is expected to carry this checksum field).
63 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
64 ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
66 # Monkeypatch commands.getstatusoutput as it may not return the correct exit
67 # code in lenny's Python. This also affects commands.getoutput and
# Drop-in replacement for commands.getstatusoutput(): runs `cmd` through
# the shell and captures stdout and stderr combined into one string.
# NOTE(review): interior lines are elided in this numbered listing (the
# wait/exit-status handling and the trailing-newline strip body are missing).
69 def dak_getstatusoutput(cmd):
70 pipe = subprocess.Popen(cmd, shell=True, universal_newlines=True,
71 stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
73 output = "".join(pipe.stdout.readlines())
# Presumably strips a single trailing newline from the output — the body
# of this conditional is elided; confirm against the full source.
75 if output[-1:] == '\n':
# Install the replacement so all callers of commands.getstatusoutput in
# this process use it.
83 commands.getstatusoutput = dak_getstatusoutput
85 ################################################################################
# Fragment: body of an HTML-escaping helper (its `def` line, presumably
# `def html_escape(s):`, is elided in this listing). Replaces each
# character matched by re_html_escaping with its entity from html_escaping.
88 """ Escape html chars """
89 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
91 ################################################################################
# Open `filename` with the given mode, converting IOError into the
# project-specific CantOpenError. NOTE(review): the try/except framing
# around the open() call is elided in this listing.
93 def open_file(filename, mode='r'):
95 Open C{file}, return fileobject.
97 @type filename: string
98 @param filename: path/filename to open
101 @param mode: open mode
104 @return: open fileobject
106 @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
110 f = open(filename, mode)
# Python 2 raise syntax; re-raises open() failures under a dak exception type.
112 raise CantOpenError, filename
115 ################################################################################
# Prompt the user on stdout and read a line of input; the elided lines
# presumably call raw_input() and handle EOFError (^D) by printing the
# interrupt notice below — confirm against the full source.
117 def our_raw_input(prompt=""):
121 sys.stdout.write(prompt)
130 sys.stderr.write("\nUser interrupt (^D).\n")
133 ################################################################################
# Split a Debian "section" value (e.g. "contrib/net") into its component
# prefix and return (section, component). NOTE(review): lines handling the
# no-slash default and the Cnf override body are elided in this listing.
135 def extract_component_from_section(section):
138 if section.find('/') != -1:
139 component = section.split('/')[0]
141 # Expand default component
# Allows per-section component overrides from the dak configuration.
143 if Cnf.has_key("Component::%s" % section):
148 return (section, component)
150 ################################################################################
# Parse deb822-style control data (as found in .changes/.dsc files) into a
# field->value dict, honouring PGP inline-signature framing according to
# `signing_rules` (see parse_changes() for the rule semantics).
# NOTE(review): many interior lines of this state machine are elided in
# this numbered listing; statement order here is only a partial view.
152 def parse_deb822(contents, signing_rules=0):
156 # Split the lines in the input, keeping the linebreaks.
157 lines = contents.splitlines(True)
160 raise ParseChangesError, "[Empty changes file]"
162 # Reindex by line number so we can easily verify the format of
168 indexed_lines[index] = line[:-1]
172 num_of_lines = len(indexed_lines.keys())
175 while index < num_of_lines:
177 line = indexed_lines[index]
# Strict mode: a blank line inside the signed section must be followed by
# the PGP SIGNATURE marker, otherwise the .dsc is malformed.
179 if signing_rules == 1:
181 if index > num_of_lines:
182 raise InvalidDscError, index
183 line = indexed_lines[index]
184 if not line.startswith("-----BEGIN PGP SIGNATURE"):
185 raise InvalidDscError, index
190 if line.startswith("-----BEGIN PGP SIGNATURE"):
192 if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
# In strict mode, skip the PGP header block up to its terminating blank line.
194 if signing_rules == 1:
195 while index < num_of_lines and line != "":
197 line = indexed_lines[index]
199 # If we're not inside the signed data, don't process anything
200 if signing_rules >= 0 and not inside_signature:
# "Field: value" single-line fields.
202 slf = re_single_line_field.match(line)
204 field = slf.groups()[0].lower()
205 changes[field] = slf.groups()[1]
209 changes[field] += '\n'
# Continuation lines of multi-line fields; must follow a known field.
211 mlf = re_multi_line_field.match(line)
214 raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
215 if first == 1 and changes[field] != "":
216 changes[field] += '\n'
218 changes[field] += mlf.groups()[0] + '\n'
# Reaching the end while still inside the signature is an error in strict mode.
222 if signing_rules == 1 and inside_signature:
223 raise InvalidDscError, index
225 changes["filecontents"] = "".join(lines)
227 if changes.has_key("source"):
228 # Strip the source version in brackets from the source field,
229 # put it in the "source-version" field instead.
230 srcver = re_srchasver.search(changes["source"])
232 changes["source"] = srcver.group(1)
233 changes["source-version"] = srcver.group(2)
236 raise ParseChangesError, error
240 ################################################################################
# Read a .changes file from disk, validate that it is UTF-8, and hand the
# content to parse_deb822(). NOTE(review): the try/except framing around
# the unicode() check is elided in this listing.
242 def parse_changes(filename, signing_rules=0):
244 Parses a changes file and returns a dictionary where each field is a
245 key. The mandatory first argument is the filename of the .changes
248 signing_rules is an optional argument:
250 - If signing_rules == -1, no signature is required.
251 - If signing_rules == 0 (the default), a signature is required.
252 - If signing_rules == 1, it turns on the same strict format checking
255 The rules for (signing_rules == 1)-mode are:
257 - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
258 followed by any PGP header data and must end with a blank line.
260 - The data section must end with a blank line and must be followed by
261 "-----BEGIN PGP SIGNATURE-----".
264 changes_in = open_file(filename)
265 content = changes_in.read()
# Python 2: unicode() raises if the content is not valid UTF-8.
268 unicode(content, 'utf-8')
270 raise ChangesUnicodeError, "Changes file not proper utf-8"
271 return parse_deb822(content, signing_rules)
273 ################################################################################
def hash_key(hashname):
    """Return the files-dict key under which the given hash is stored.

    e.g. 'md5' -> 'md5sum', 'sha1' -> 'sha1sum'.
    """
    return "{0}sum".format(hashname)
278 ################################################################################
# Extend each entry of the `files` dict with the named hash computed from
# the file on disk; returns a list of rejection messages for files that
# could not be opened. NOTE(review): rejmsg initialisation, the try
# keyword and the return are elided in this listing.
280 def create_hash(where, files, hashname, hashfunc):
282 create_hash extends the passed files dict with the given hash by
283 iterating over all files on disk and passing them to the hashing
288 for f in files.keys():
290 file_handle = open_file(f)
291 except CantOpenError:
292 rejmsg.append("Could not open file %s for checksumming" % (f))
# apt_pkg hash functions accept an open file object directly.
295 files[f][hash_key(hashname)] = hashfunc(file_handle)
300 ################################################################################
# Verify the stored hash of every entry in `files` against the file on
# disk; returns rejection messages. Does not modify `files`.
# NOTE(review): the try framing, file_handle cleanup and return are
# elided in this listing.
302 def check_hash(where, files, hashname, hashfunc):
304 check_hash checks the given hash in the files dict against the actual
305 files on disk. The hash values need to be present consistently in
306 all file entries. It does not modify its input in any way.
310 for f in files.keys():
314 file_handle = open_file(f)
316 # Check for the hash entry, to not trigger a KeyError.
317 if not files[f].has_key(hash_key(hashname)):
318 rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
322 # Actually check the hash for correctness.
323 if hashfunc(file_handle) != files[f][hash_key(hashname)]:
324 rejmsg.append("%s: %s check failed in %s" % (f, hashname,
# Deliberately tolerated: files already moved to the pool are not present
# at their original path, so CantOpenError is not treated as fatal here.
326 except CantOpenError:
327 # TODO: This happens when the file is in the pool.
328 # warn("Cannot open file %s" % f)
# Compare the declared "size" of each entry in `files` with the actual
# on-disk size (via os.stat); returns rejection messages.
# NOTE(review): the os.stat call, its exception handling and the return
# are elided in this listing.
337 def check_size(where, files):
339 check_size checks the file sizes in the passed files dict against the
344 for f in files.keys():
349 # TODO: This happens when the file is in the pool.
353 actual_size = entry[stat.ST_SIZE]
354 size = int(files[f]["size"])
355 if size != actual_size:
356 rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
357 % (f, actual_size, size, where))
360 ################################################################################
# Validate that the file list of a .dsc matches what its announced source
# Format allows (e.g. a 3.0 (quilt) package must have exactly one
# debian.tar and one orig tarball). NOTE(review): several interior lines
# (rejmsg init, the `has[key] += 1` loop body, the final return) are
# elided in this listing.
362 def check_dsc_files(dsc_filename, dsc=None, dsc_files=None):
364 Verify that the files listed in the Files field of the .dsc are
365 those expected given the announced Format.
367 @type dsc_filename: string
368 @param dsc_filename: path of .dsc file
371 @param dsc: the content of the .dsc parsed by C{parse_changes()}
373 @type dsc_files: dict
374 @param dsc_files: the file list returned by C{build_file_list()}
377 @return: all errors detected
381 # Parse the file if needed
383 dsc = parse_changes(dsc_filename, signing_rules=1);
385 if dsc_files is None:
386 dsc_files = build_file_list(dsc, is_a_dsc=1)
388 # Ensure .dsc lists proper set of source files according to the format
# defaultdict so unseen file types count as 0 in the checks below.
390 has = defaultdict(lambda: 0)
# Lookup table: filename-suffix regex -> counter keys to bump. Earlier,
# more specific patterns must come before the generic tar.gz ones.
393 (r'orig.tar.gz', ('orig_tar_gz', 'orig_tar')),
394 (r'diff.gz', ('debian_diff',)),
395 (r'tar.gz', ('native_tar_gz', 'native_tar')),
396 (r'debian\.tar\.(gz|bz2)', ('debian_tar',)),
397 (r'orig\.tar\.(gz|bz2)', ('orig_tar',)),
398 (r'tar\.(gz|bz2)', ('native_tar',)),
399 (r'orig-.+\.tar\.(gz|bz2)', ('more_orig_tar',)),
402 for f in dsc_files.keys():
403 m = re_issource.match(f)
405 rejmsg.append("%s: %s in Files field not recognised as source."
409 # Populate 'has' dictionary by resolving keys in lookup table
411 for regex, keys in ftype_lookup:
412 if re.match(regex, m.group(3)):
418 # File does not match anything in lookup table; reject
# NOTE(review): uses reject() here while every other path appends to
# rejmsg — confirm against the full source whether this is intentional.
420 reject("%s: unexpected source file '%s'" % (dsc_filename, f))
422 # Check for multiple files
423 for file_type in ('orig_tar', 'native_tar', 'debian_tar', 'debian_diff'):
424 if has[file_type] > 1:
425 rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))
427 # Source format specific tests
429 format = get_format_from_string(dsc['format'])
431 '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)
434 except UnknownFormatError:
435 # Not an error here for now
440 ################################################################################
# Reject any "Checksums-<algo>" field in `manifest` whose algorithm is not
# in the module-level known_hashes table. NOTE(review): rejmsg init and
# the return are elided in this listing.
442 def check_hash_fields(what, manifest):
444 check_hash_fields ensures that there are no checksum fields in the
445 given dict that we do not know about.
# Extract just the hash names ("sha1", "sha256") from known_hashes.
449 hashes = map(lambda x: x[0], known_hashes)
450 for field in manifest:
451 if field.startswith("checksums-"):
452 hashname = field.split("-",1)[1]
453 if hashname not in hashes:
454 rejmsg.append("Unsupported checksum field for %s "\
455 "in %s" % (hashname, what))
458 ################################################################################
# Ensure a given hash is available for all files of a .changes: if the
# changes Format version is new enough the hash is parsed from the
# Checksums field, otherwise it is computed from disk.
# NOTE(review): the assignment selecting `func` (parse vs. create path)
# is elided in this listing.
460 def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
461 if format >= version:
462 # The version should contain the specified hash.
465 # Import hashes from the changes
466 rejmsg = parse_checksums(".changes", files, changes, hashname)
470 # We need to calculate the hash because it can't possibly
473 return func(".changes", files, hashname, hashfunc)
475 # We could add the orig which might be in the pool to the files dict to
476 # access the checksums easily.
# Verify every Checksums-<hashname> entry present in a .dsc against the
# files on disk (delegating the actual comparison to check_hash).
# NOTE(review): rejmsg init and return are elided in this listing.
478 def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
480 ensure_dsc_hashes' task is to ensure that each and every *present* hash
481 in the dsc is correct, i.e. identical to the changes file and if necessary
482 the pool. The latter task is delegated to check_hash.
# Absent Checksums fields are not an error — only present ones are checked.
486 if not dsc.has_key('Checksums-%s' % (hashname,)):
488 # Import hashes from the dsc
489 parse_checksums(".dsc", dsc_files, dsc, hashname)
491 rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
494 ################################################################################
# Parse a "checksums-<hashname>" field of `manifest` (lines of
# "checksum size filename"), store each checksum into the matching entry
# of `files`, and report size mismatches and missing entries.
# NOTE(review): rejmsg init, early return, try framing and final return
# are elided in this listing.
496 def parse_checksums(where, files, manifest, hashname):
498 field = 'checksums-%s' % hashname
499 if not field in manifest:
501 for line in manifest[field].split('\n'):
504 clist = line.strip().split(' ')
506 checksum, size, checkfile = clist
508 rejmsg.append("Cannot parse checksum line [%s]" % (line))
510 if not files.has_key(checkfile):
511 # TODO: check for the file's entry in the original files dict, not
512 # the one modified by (auto)byhand and other weird stuff
513 # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
514 # (file, hashname, where))
# Sizes are compared as strings; both come from parsed control data.
516 if not files[checkfile]["size"] == size:
517 rejmsg.append("%s: size differs for files and checksums-%s entry "\
518 "in %s" % (checkfile, hashname, where))
520 files[checkfile][hash_key(hashname)] = checksum
521 for f in files.keys():
522 if not files[f].has_key(hash_key(hashname)):
# NOTE(review): this message interpolates `checkfile` — the loop variable
# left over from the parsing loop above — rather than `f`, so the wrong
# filename is reported here. Looks like a bug; confirm and fix upstream.
523 rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
527 ################################################################################
529 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
# Parse the Files (or Checksums-*) field of a parsed .changes/.dsc into a
# dict keyed by filename, with size/section/priority/component/hash per
# entry. NOTE(review): several interior lines (the per-line split, the
# dsc branch without section/priority, and the return) are elided in
# this listing.
531 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
534 # Make sure we have a Files: field to parse...
535 if not changes.has_key(field):
536 raise NoFilesFieldError
538 # Validate .changes Format: field
540 validate_changes_format(parse_format(changes['format']), field)
# Only .changes "Files" lines carry section/priority columns.
542 includes_section = (not is_a_dsc) and field == "files"
544 # Parse each entry/line:
545 for i in changes[field].split('\n'):
549 section = priority = ""
552 (md5, size, section, priority, name) = s
554 (md5, size, name) = s
556 raise ParseChangesError, i
563 (section, component) = extract_component_from_section(section)
565 files[name] = dict(size=size, section=section,
566 priority=priority, component=component)
# The hash column is stored under the caller-specified hash key
# ("md5sum" by default) despite the local variable being named md5.
567 files[name][hashname] = md5
571 ################################################################################
573 def send_mail (message, filename=""):
574 """sendmail wrapper, takes _either_ a message string or a file as arguments"""
# NOTE(review): many interior lines are elided in this numbered listing
# (temp-file close, whitelist-filter bookkeeping, final unlink).
576 # Check whether we're supposed to be sending mail
577 if Cnf.has_key("Dinstall::Options::No-Mail") and Cnf["Dinstall::Options::No-Mail"]:
580 # If we've been passed a string dump it into a temporary file
582 (fd, filename) = tempfile.mkstemp()
583 os.write (fd, message)
# Optional recipient whitelisting: rewrite To/Bcc/Cc so that only
# whitelisted addresses remain; drop the mail entirely if none match.
586 if Cnf.has_key("Dinstall::MailWhiteList") and \
587 Cnf["Dinstall::MailWhiteList"] != "":
588 message_in = open_file(filename)
589 message_raw = modemail.message_from_file(message_in)
593 whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
595 for line in whitelist_in:
596 if not re_whitespace_comment.match(line):
# Lines marked with the RE marker are treated as regexes; others are
# matched literally (escaped).
597 if re_re_mark.match(line):
598 whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
600 whitelist.append(re.compile(re.escape(line.strip())))
605 fields = ["To", "Bcc", "Cc"]
608 value = message_raw.get(field, None)
611 for item in value.split(","):
612 (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
618 if not mail_whitelisted:
619 print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])
623 # Doesn't have any mail in whitelist so remove the header
625 del message_raw[field]
627 message_raw.replace_header(field, ', '.join(match))
629 # Change message fields in order if we don't have a To header
630 if not message_raw.has_key("To"):
633 if message_raw.has_key(field):
634 message_raw[fields[-1]] = message_raw[field]
635 del message_raw[field]
638 # Clean up any temporary files
639 # and return, as we removed all recipients.
641 os.unlink (filename);
# NOTE(review): O_EXCL without O_CREAT has no effect when reopening an
# existing file — presumably intended to rewrite the temp file in place;
# confirm against the full source.
644 fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
645 os.write (fd, message_raw.as_string(True));
# Hand the (possibly rewritten) message file to the configured sendmail.
649 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
651 raise SendmailFailedError, output
653 # Clean up any temporary files
657 ################################################################################
# Return the pool subdirectory for a source package: "lib*" packages pool
# under their first four characters, everything else under the first one.
# NOTE(review): lines 660-661 and 664 are elided in this listing (the
# docstring and, presumably, an `else:` before the final return).
659 def poolify (source, component):
662 if source[:3] == "lib":
663 return component + source[:4] + '/' + source + '/'
665 return component + source[:1] + '/' + source + '/'
667 ################################################################################
# Move `src` to `dest` (file or directory target), creating the target
# directory with mode 02775 if needed; refuses to overwrite unless
# `overwrite` is set. Implemented as copy2 + chmod; the os.unlink of the
# source and umask restore are among the lines elided in this listing.
669 def move (src, dest, overwrite = 0, perms = 0664):
670 if os.path.exists(dest) and os.path.isdir(dest):
673 dest_dir = os.path.dirname(dest)
674 if not os.path.exists(dest_dir):
# Temporarily clear the umask so makedirs gets the exact mode below.
675 umask = os.umask(00000)
676 os.makedirs(dest_dir, 02775)
678 #print "Moving %s to %s..." % (src, dest)
679 if os.path.exists(dest) and os.path.isdir(dest):
680 dest += '/' + os.path.basename(src)
681 # Don't overwrite unless forced to
682 if os.path.exists(dest):
684 fubar("Can't move %s to %s - file already exists." % (src, dest))
686 if not os.access(dest, os.W_OK):
687 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
688 shutil.copy2(src, dest)
689 os.chmod(dest, perms)
# Copy `src` to `dest`; same directory-creation and overwrite rules as
# move() but raises FileExistsError/CantOverwriteError instead of calling
# fubar(). NOTE(review): umask restore and overwrite-flag checks are
# elided in this listing.
692 def copy (src, dest, overwrite = 0, perms = 0664):
693 if os.path.exists(dest) and os.path.isdir(dest):
696 dest_dir = os.path.dirname(dest)
697 if not os.path.exists(dest_dir):
698 umask = os.umask(00000)
699 os.makedirs(dest_dir, 02775)
701 #print "Copying %s to %s..." % (src, dest)
702 if os.path.exists(dest) and os.path.isdir(dest):
703 dest += '/' + os.path.basename(src)
704 # Don't overwrite unless forced to
705 if os.path.exists(dest):
707 raise FileExistsError
709 if not os.access(dest, os.W_OK):
710 raise CantOverwriteError
711 shutil.copy2(src, dest)
712 os.chmod(dest, perms)
714 ################################################################################
# Fragment: body of a host-specific lookup (its `def` line at original
# line 716 is elided). Resolves the local hostname and returns the
# per-host "DatabaseHostname" from the dak configuration if set.
717 res = socket.gethostbyaddr(socket.gethostname())
718 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
719 if database_hostname:
720 return database_hostname
# Determine which dak configuration file to use: $DAK_CONFIG wins, then a
# per-user ~/etc/dak.conf if the host allows local configs, then the
# host-specific DakConfig entry, then the module default.
724 def which_conf_file ():
725 if os.getenv('DAK_CONFIG'):
726 return os.getenv('DAK_CONFIG')
728 res = socket.gethostbyaddr(socket.gethostname())
729 # In case we allow local config files per user, try if one exists
730 if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
731 homedir = os.getenv("HOME")
# NOTE(review): os.path.join with an absolute second component returns
# "/etc/dak.conf", discarding homedir — likely meant "etc/dak.conf";
# confirm against the full source.
732 confpath = os.path.join(homedir, "/etc/dak.conf")
733 if os.path.exists(confpath):
# Re-reads the default config into Cnf; the return of confpath (if any)
# is among the lines elided in this listing.
734 apt_pkg.ReadConfigFileISC(Cnf,default_config)
736 # We are still in here, so there is no local config file or we do
737 # not allow local files. Do the normal stuff.
738 if Cnf.get("Config::" + res[0] + "::DakConfig"):
739 return Cnf["Config::" + res[0] + "::DakConfig"]
741 return default_config
# Determine which apt configuration file to use; mirrors which_conf_file()
# but consults the per-host "AptConfig" entry and falls back to
# default_apt_config.
743 def which_apt_conf_file ():
744 res = socket.gethostbyaddr(socket.gethostname())
745 # In case we allow local config files per user, try if one exists
746 if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
747 homedir = os.getenv("HOME")
# NOTE(review): same absolute-path join quirk as which_conf_file();
# os.path.join(homedir, "/etc/dak.conf") yields "/etc/dak.conf".
748 confpath = os.path.join(homedir, "/etc/dak.conf")
749 if os.path.exists(confpath):
750 apt_pkg.ReadConfigFileISC(Cnf,default_config)
752 if Cnf.get("Config::" + res[0] + "::AptConfig"):
753 return Cnf["Config::" + res[0] + "::AptConfig"]
755 return default_apt_config
# Return the host-specific forward-alias file path if it exists; the
# return statements (aliasfn / presumably None) are elided in this listing.
757 def which_alias_file():
758 hostname = socket.gethostbyaddr(socket.gethostname())[0]
759 aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
760 if os.path.exists(aliasfn):
765 ################################################################################
# Read a template file and replace every key of subst_map with the string
# form of its value; the file close and return of the substituted text
# are elided in this listing.
767 def TemplateSubst(subst_map, filename):
768 """ Perform a substition of template """
769 templatefile = open_file(filename)
770 template = templatefile.read()
# Plain substring replacement, in dict-iteration order.
771 for k, v in subst_map.iteritems():
772 template = template.replace(k, str(v))
776 ################################################################################
# Print a fatal error to stderr; the sys.exit(exit_code) call is elided
# in this listing.
778 def fubar(msg, exit_code=1):
779 sys.stderr.write("E: %s\n" % (msg))
# Fragment: body of warn(msg) — its `def` line at original 782 is elided.
783 sys.stderr.write("W: %s\n" % (msg))
785 ################################################################################
787 # Returns the user name with a laughable attempt at rfc822 conformancy
788 # (read: removing stray periods).
# Fragment: GECOS full name of the invoking user, periods stripped
# (its `def` line is elided in this listing).
790 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
# Fragment: login name of the invoking user (its `def` line is elided).
793 return pwd.getpwuid(os.getuid())[0]
795 ################################################################################
# Fragment: tail of a human-readable-size helper — presumably returns a
# count `c` with a unit suffix `t`; everything else is elided.
805 return ("%d%s" % (c, t))
807 ################################################################################
# Normalise the "architecture" field of a parsed changes dict from a
# string into a {archname: 1} mapping (used by changes_compare below).
# NOTE(review): the split of `o` into `j` values is elided in this listing.
809 def cc_fix_changes (changes):
810 o = changes.get("architecture", "")
812 del changes["architecture"]
813 changes["architecture"] = {}
815 changes["architecture"][j] = 1
# cmp()-style comparator for .changes filenames, for sorting uploads in
# processing order. NOTE(review): the exception handling around
# parse_changes and the early `return q` lines after each comparison are
# elided in this listing.
817 def changes_compare (a, b):
818 """ Sort by source name, source version, 'have source', and then by filename """
820 a_changes = parse_changes(a)
825 b_changes = parse_changes(b)
829 cc_fix_changes (a_changes)
830 cc_fix_changes (b_changes)
832 # Sort by source name
833 a_source = a_changes.get("source")
834 b_source = b_changes.get("source")
835 q = cmp (a_source, b_source)
839 # Sort by source version
840 a_version = a_changes.get("version", "0")
841 b_version = b_changes.get("version", "0")
# Debian version comparison semantics, not plain string order.
842 q = apt_pkg.VersionCompare(a_version, b_version)
846 # Sort by 'have source'
847 a_has_source = a_changes["architecture"].get("source")
848 b_has_source = b_changes["architecture"].get("source")
849 if a_has_source and not b_has_source:
851 elif b_has_source and not a_has_source:
854 # Fall back to sort by filename
857 ################################################################################
# Find a non-existing filename by appending ".1", ".2", ... to `dest`,
# giving up after `too_many` attempts. NOTE(review): the counter init,
# increment and final return are elided in this listing.
859 def find_next_free (dest, too_many=100):
862 while os.path.exists(dest) and extra < too_many:
863 dest = orig_dest + '.' + repr(extra)
865 if extra >= too_many:
866 raise NoFreeFilenameError
869 ################################################################################
# Join a sequence into a separator-delimited string, rendering None
# elements as empty strings. NOTE(review): the resultlist init and the
# `else:` between the branches are elided in this listing.
871 def result_join (original, sep = '\t'):
873 for i in xrange(len(original)):
874 if original[i] == None:
875 resultlist.append("")
877 resultlist.append(original[i])
878 return sep.join(resultlist)
880 ################################################################################
# Prefix each line of `str` with `prefix`, optionally keeping blank lines;
# the accumulator init, trailing-newline strip and return are elided in
# this listing. (Parameter name shadows the builtin `str`.)
882 def prefix_multi_line_string(str, prefix, include_blank_lines=0):
884 for line in str.split('\n'):
886 if line or include_blank_lines:
887 out += "%s%s\n" % (prefix, line)
888 # Strip trailing new line
# Validate a .changes (or .dak) filename argument; see the docstring for
# the require_changes semantics. NOTE(review): the success return and the
# error-path returns are among the lines elided in this listing.
895 def validate_changes_file_arg(filename, require_changes=1):
897 'filename' is either a .changes or .dak file. If 'filename' is a
898 .dak file, it's changed to be the corresponding .changes file. The
899 function then checks if the .changes file a) exists and b) is
900 readable and returns the .changes filename if so. If there's a
901 problem, the next action depends on the option 'require_changes'
904 - If 'require_changes' == -1, errors are ignored and the .changes
905 filename is returned.
906 - If 'require_changes' == 0, a warning is given and 'None' is returned.
907 - If 'require_changes' == 1, a fatal error is raised.
912 orig_filename = filename
913 if filename.endswith(".dak"):
# Map foo.dak -> foo.changes before validating.
914 filename = filename[:-4]+".changes"
916 if not filename.endswith(".changes"):
917 error = "invalid file type; not a changes file"
919 if not os.access(filename,os.R_OK):
920 if os.path.exists(filename):
921 error = "permission denied"
923 error = "file not found"
926 if require_changes == 1:
927 fubar("%s: %s." % (orig_filename, error))
928 elif require_changes == 0:
929 warn("Skipping %s - %s" % (orig_filename, error))
931 else: # We only care about the .dak file
936 ################################################################################
# Fragment: body of an arch predicate (its `def` line at original 938 is
# elided) — true for every real build architecture, i.e. anything other
# than "source" and "all".
939 return (arch != "source" and arch != "all")
941 ################################################################################
def join_with_commas_and(list):
    """Render a list as an English enumeration.

    [] -> "nothing"; ["a"] -> "a"; ["a", "b", "c"] -> "a, b and c".
    """
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    head = ", ".join(list[:-1])
    return "%s and %s" % (head, list[-1])
948 ################################################################################
# Fragment: body of a dependency pretty-printer (its `def` line and the
# loop header are elided). Formats each (pkg, version, constraint) atom
# as "pkg (constraint version)" and joins alternatives with " |".
953 (pkg, version, constraint) = atom
955 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
958 pp_deps.append(pp_dep)
959 return " |".join(pp_deps)
961 ################################################################################
966 ################################################################################
968 def parse_args(Options):
969 """ Handle -a, -c and -s arguments; returns them as SQL constraints """
970 # XXX: This should go away and everything which calls it be converted
971 # to use SQLA properly. For now, we'll just fix it not to use
972 # the old Pg interface though
# NOTE(review): several interior lines (suite/component/arch list inits,
# `continue` statements after warnings, check_source init) are elided in
# this numbered listing.
973 session = DBConn().session()
# Process suites (-s): resolve names to ids, build an SQL IN-constraint.
977 for suitename in split_args(Options["Suite"]):
978 suite = get_suite(suitename, session=session)
# NOTE(review): get_suite may return None for an unknown suite, in which
# case `suite.suite_id` would raise before the warning — confirm against
# the full source (the component branch below checks `is None` first).
979 if suite.suite_id is None:
980 warn("suite '%s' not recognised." % (suite.suite_name))
982 suite_ids_list.append(suite.suite_id)
984 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
986 fubar("No valid suite given.")
# Process components (-c) the same way.
991 if Options["Component"]:
992 component_ids_list = []
993 for componentname in split_args(Options["Component"]):
994 component = get_component(componentname, session=session)
995 if component is None:
996 warn("component '%s' not recognised." % (componentname))
998 component_ids_list.append(component.component_id)
999 if component_ids_list:
1000 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
1002 fubar("No valid component given.")
1006 # Process architecture
1007 con_architectures = ""
1009 if Options["Architecture"]:
1011 for archname in split_args(Options["Architecture"]):
# "source" is tracked via check_source rather than an arch id.
1012 if archname == "source":
1015 arch = get_architecture(archname, session=session)
1017 warn("architecture '%s' not recognised." % (archname))
1019 arch_ids_list.append(arch.arch_id)
1021 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
1023 if not check_source:
1024 fubar("No valid architecture given.")
1028 return (con_suites, con_architectures, con_components, check_source)
1030 ################################################################################
1032 # Inspired(tm) by Bryn Keller's print_exc_plus (See
1033 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
# Fragment: extended traceback printer — its `def` line and the loop that
# collects frames are elided in this listing. Walks the traceback's
# frames and dumps each frame's local variables after the standard trace.
1036 tb = sys.exc_info()[2]
1043 frame = frame.f_back
1045 traceback.print_exc()
1047 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
1048 frame.f_code.co_filename,
1050 for key, value in frame.f_locals.items():
1051 print "\t%20s = " % key,
# repr() of arbitrary locals can itself raise; guarded (try framing elided).
1055 print "<unable to print>"
1057 ################################################################################
# Runs `function` and, on exception, presumably dumps the extended
# traceback above — the entire body (original lines 1060-1066) is elided
# in this listing.
1059 def try_with_debug(function):
1067 ################################################################################
# cmp()-style comparator for architecture names where "source" sorts
# first; the remaining comparison branches are elided in this listing.
1069 def arch_compare_sw (a, b):
1071 Function for use in sorting lists of architectures.
1073 Sorts normally except that 'source' dominates all others.
1076 if a == "source" and b == "source":
1085 ################################################################################
# Split a command-line argument string on commas (or whitespace when no
# comma is present); the actual split/return lines are elided in this
# listing.
1087 def split_args (s, dwim=1):
1089 Split command line arguments which can be separated by either commas
1090 or whitespace. If dwim is set, it will complain about string ending
1091 in comma since this usually means someone did 'dak ls -a i386, m68k
1092 foo' or something and the inevitable confusion resulting from 'm68k'
1093 being treated as an argument is undesirable.
1096 if s.find(",") == -1:
# Trailing comma is almost always a typo ("i386, m68k"); abort loudly.
1099 if s[-1:] == "," and dwim:
1100 fubar("split_args: found trailing comma, spurious space maybe?")
1103 ################################################################################
# Run `cmd` (gpgv) with an extra status fd pair, returning (output,
# status, exit_status). Hand-rolled fork/exec plumbing so gpgv's
# --status-fd stream can be captured separately from stdout/stderr.
# NOTE(review): the fork() call, child-side dup2 setup and several
# close/cleanup lines are elided in this numbered listing; the statement
# order below is only a partial view of the fd juggling.
1105 def gpgv_get_status_output(cmd, status_read, status_write):
1107 Our very own version of commands.getouputstatus(), hacked to support
1111 cmd = ['/bin/sh', '-c', cmd]
1112 p2cread, p2cwrite = os.pipe()
1113 c2pread, c2pwrite = os.pipe()
1114 errout, errin = os.pipe()
# Child: close every fd except the status_write fd gpgv was told about.
1124 for i in range(3, 256):
1125 if i != status_write:
1131 os.execvp(cmd[0], cmd)
# Parent side: consolidate child stdout/stderr ends for select() below.
1137 os.dup2(c2pread, c2pwrite)
1138 os.dup2(errout, errin)
1140 output = status = ""
# Multiplex reads until all streams hit EOF.
1142 i, o, e = select.select([c2pwrite, errin, status_read], [], [])
1145 r = os.read(fd, 8196)
1147 more_data.append(fd)
1148 if fd == c2pwrite or fd == errin:
1150 elif fd == status_read:
1153 fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
1155 pid, exit_status = os.waitpid(pid, 0)
1157 os.close(status_write)
1158 os.close(status_read)
1168 return output, status, exit_status
1170 ################################################################################
# Parse gpgv --status-fd output into a {keyword: args} dict plus an
# accumulated internal-error string; duplicate status tokens are errors
# except for a few that gpg legitimately repeats.
# NOTE(review): the keywords/internal_error inits, blank-line skip and
# `continue` statements are elided in this listing.
1172 def process_gpgv_output(status):
1173 # Process the status-fd output
1176 for line in status.split('\n'):
1180 split = line.split()
1182 internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
1184 (gnupg, keyword) = split[:2]
# Every status line must carry the "[GNUPG:]" prefix.
1185 if gnupg != "[GNUPG:]":
1186 internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
1189 if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
1190 internal_error += "found duplicate status token ('%s').\n" % (keyword)
1193 keywords[keyword] = args
1195 return (keywords, internal_error)
1197 ################################################################################
# Fetch the key that signed `filename` from a keyserver into `keyring`;
# returns an error message string on failure. NOTE(review): the default
# `if keyserver is None:`/`if keyring is None:` guards and the success
# return are elided in this listing.
1199 def retrieve_key (filename, keyserver=None, keyring=None):
1201 Retrieve the key that signed 'filename' from 'keyserver' and
1202 add it to 'keyring'. Returns nothing on success, or an error message
1206 # Defaults for keyserver and keyring
1208 keyserver = Cnf["Dinstall::KeyServer"]
1210 keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
1212 # Ensure the filename contains no shell meta-characters or other badness
# The filename is interpolated into a shell command below, so it must
# pass the taint check first.
1213 if not re_taint_free.match(filename):
1214 return "%s: tainted filename" % (filename)
1216 # Invoke gpgv on the file
# Run gpgv with an empty keyring purely to extract the NO_PUBKEY
# fingerprint from its status output.
1217 status_read, status_write = os.pipe()
1218 cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1219 (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1221 # Process the status-fd output
1222 (keywords, internal_error) = process_gpgv_output(status)
1224 return internal_error
1226 if not keywords.has_key("NO_PUBKEY"):
1227 return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1229 fingerprint = keywords["NO_PUBKEY"][0]
1230 # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
1231 # it'll try to create a lockfile in /dev. A better solution might
1232 # be a tempfile or something.
1233 cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1234 % (Cnf["Dinstall::SigningKeyring"])
1235 cmd += " --keyring %s --keyserver %s --recv-key %s" \
1236 % (keyring, keyserver, fingerprint)
1237 (result, output) = commands.getstatusoutput(cmd)
1239 return "'%s' failed with exit code %s" % (cmd, result)
1243 ################################################################################
# Build the "--keyring X --keyring Y" argument string for gpg/gpgv,
# defaulting to the configured Dinstall keyrings; the `if keyrings is
# None:` guard line is elided in this listing.
1245 def gpg_keyring_args(keyrings=None):
1247 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1249 return " ".join(["--keyring %s" % x for x in keyrings])
1251 ################################################################################
def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None):
    """
    Check the signature of a file and return the fingerprint if the
    signature is valid or 'None' if it's not. The first argument is the
    filename whose signature should be checked. The second argument is a
    reject function and is called when an error is found. The reject()
    function must allow for two arguments: the first is the error message,
    the second is an optional prefix string. It's possible for reject()
    to be called more than once during an invocation of check_signature().
    The third argument is optional and is the name of the files the
    detached signature applies to. The fourth argument is optional and is
    a *list* of keyrings to use. 'autofetch' can either be None, True or
    False. If None, the default behaviour specified in the config will be
    used.

    Returns a tuple (fingerprint, rejects); 'fingerprint' is None whenever
    the list of reject messages is non-empty.
    """
    # Ensure the filename contains no shell meta-characters or other badness:
    # both filenames are interpolated into a shell command line below.
    if not re_taint_free.match(sig_filename):
        rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
        return (None, rejects)

    if data_filename and not re_taint_free.match(data_filename):
        rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
        return (None, rejects)

    # NOTE(review): presumably only a fallback for when the caller passed no
    # keyrings -- the 'if not keyrings:' guard is not visible in this view.
    keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    # Autofetch the signing key if that's enabled
    if autofetch == None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    # Try to pull the signing key from a keyserver; a non-empty error
    # message from the fetch becomes a reject.
    # NOTE(review): the 'if autofetch:' / 'if error_msg:' guards appear to
    # be elided from this view.
    error_msg = retrieve_key(sig_filename)
    rejects.append(error_msg)
    return (None, rejects)

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    rejects.append("internal error while performing signature check on %s." % (sig_filename))
    # NOTE(review): list.append() takes exactly one argument -- the
    # two-argument calls below look like leftovers from the old
    # reject(msg, prefix) callback interface and would raise TypeError
    # if this path is reached.
    rejects.append(internal_error, "")
    rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.", "")
    return (None, rejects)

    # Now check for obviously bad things in the processed output
    if keywords.has_key("KEYREVOKED"):
        rejects.append("The key used to sign %s has been revoked." % (sig_filename))
    if keywords.has_key("BADSIG"):
        rejects.append("bad signature on %s." % (sig_filename))
    if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
        rejects.append("failed to check signature on %s." % (sig_filename))
    if keywords.has_key("NO_PUBKEY"):
        args = keywords["NO_PUBKEY"]
        # NOTE(review): 'key' is presumably args[0]; the assignment (and its
        # length guard) are not visible in this view.
        rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if keywords.has_key("BADARMOR"):
        rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if keywords.has_key("NODATA"):
        rejects.append("no signature found in %s." % (sig_filename))
    if keywords.has_key("EXPKEYSIG"):
        args = keywords["EXPKEYSIG"]
        rejects.append("Signature made by expired key 0x%s" % (key))
    if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
        args = keywords["KEYEXPIRED"]
        # The expiry can arrive either as an epoch timestamp (no 'T') or as
        # an ISO8601-style string; format the former as YYYY-MM-DD.
        # NOTE(review): the 'timestamp = args[0]' assignment and the
        # try/except + else branches around these lines are elided here.
        if timestamp.count("T") == 0:
            expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
            expiredate = "unknown (%s)" % (timestamp)
            expiredate = timestamp
        rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))

    if len(rejects) > 0:
        return (None, rejects)

    # Next check gpgv exited with a zero return code
    # NOTE(review): presumably guarded by 'if exit_status:' in the full source.
    rejects.append("gpgv failed while checking %s." % (sig_filename))
    rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "))
    rejects.append(prefix_multi_line_string(output, " [GPG output:] "))
    return (None, rejects)

    # Sanity check the good stuff we expect
    if not keywords.has_key("VALIDSIG"):
        rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
    args = keywords["VALIDSIG"]
    rejects.append("internal error while checking signature on %s." % (sig_filename))
    # First VALIDSIG argument is the fingerprint of the signing key.
    fingerprint = args[0]
    if not keywords.has_key("GOODSIG"):
        rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if not keywords.has_key("SIG_ID"):
        rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

    # Finally ensure there's not something we don't recognise
    known_keywords = dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
                          NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="",POLICY_URL="")

    for keyword in keywords.keys():
        if not known_keywords.has_key(keyword):
            rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))

    if len(rejects) > 0:
        return (None, rejects)

    return (fingerprint, [])
1385 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """retreive email addresses from gpg key uids for a given fingerprint"""
    # Serve repeat lookups from the module-level cache.
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses != None:
        # NOTE(review): cache hit -- the early 'return addresses' is elided
        # from this view, as is the 'addresses = set()' initialisation below.
    # Cache miss: ask gpg to print the key and scrape its uid lines for
    # mail addresses.
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
          % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    for l in output.split('\n'):
        m = re_gpg_uid.match(l)
        # NOTE(review): presumably guarded by a check that the uid regex
        # matched; the guard is not visible in this view.
        addresses.add(m.group(1))
    # Remember the result for subsequent calls.
    key_uid_email_cache[fingerprint] = addresses
1404 ################################################################################
1406 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
1408 def wrap(paragraph, max_length, prefix=""):
1412 words = paragraph.split()
1415 word_size = len(word)
1416 if word_size > max_length:
1418 s += line + '\n' + prefix
1419 s += word + '\n' + prefix
1422 new_length = len(line) + word_size + 1
1423 if new_length > max_length:
1424 s += line + '\n' + prefix
1437 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
    Returns fixed 'src'
    """
    # Strip the archive root from both the target and the link location.
    relative_src = src.replace(root, '', 1)
    link_dir = os.path.dirname(dest.replace(root, '', 1))
    # One '../' hop for every path component of the link's directory.
    hops = link_dir.count('/') + 1
    return ('../' * hops) + relative_src
1450 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.
    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pair (fd, name).
    """
    # mkstemp reserves the name and opens the file in one step, so there is
    # no race between picking a name and creating the file.
    return tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=directory)
1464 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Return a secure and unique directory by pre-creating it.
    If 'parent' is non-null, it will be the directory the directory is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pathname to the new directory
    """
    # mkdtemp creates the directory with mode 0700, readable only by us.
    return tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=parent)
1478 ################################################################################
def is_email_alias(email):
    """ checks if the user part of the email is listed in the alias file """
    # Lazily populate the module-level cache on first use.
    # NOTE(review): the 'global alias_cache' declaration and the
    # 'alias_cache = set()' initialisation are not visible in this view.
    if alias_cache == None:
        aliasfn = which_alias_file()
        # Alias file lines look like "name: target"; only the name matters.
        for l in open(aliasfn):
            alias_cache.add(l.split(':')[0])
    # Compare only the local part (before the '@') of the address.
    uid = email.split('@')[0]
    return uid in alias_cache
1492 ################################################################################
def get_changes_files(from_dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place)

    Returns a list of filenames
    """
    # Much of the rest of p-u/p-a depends on being in the right place
    # NOTE(review): presumably wrapped in try/except OSError in the full
    # source ('e' below is otherwise unbound) together with the
    # os.chdir(from_dir) call; those lines are not visible in this view.
    changes_files = [x for x in os.listdir(from_dir) if x.endswith('.changes')]
    fubar("Failed to read list from directory %s (%s)" % (from_dir, e))
    return changes_files
1511 ################################################################################
# Module initialisation, executed at import time: build the apt_pkg
# configuration object every other helper in this module reads from.
Cnf = apt_pkg.newConfiguration()
# Setting DAK_TEST in the environment skips reading the real host config
# (useful for test runs that must not touch /etc/dak).
if not os.getenv("DAK_TEST"):
    apt_pkg.ReadConfigFileISC(Cnf,default_config)

# Layer the host-specific config file on top when one differs from the
# default; later reads into the same Cnf override earlier values.
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())