2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
40 import email as modemail
43 from dbconn import DBConn, get_architecture, get_component, get_suite
44 from dak_exceptions import *
45 from textutils import fix_maintainer
46 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
47 re_multi_line_field, re_srchasver, re_taint_free, \
48 re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource
50 from srcformats import srcformats, get_format_from_string
51 from collections import defaultdict
53 ################################################################################
55 default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
56 default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used
58 alias_cache = None #: Cache for email alias checks
59 key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
61 # (hashname, function, earliest_changes_version)
62 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
63 ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
65 # Monkeypatch commands.getstatusoutput as it returns a "0" exit code in
66 # all situations under lenny's Python.
68 def dak_getstatusoutput(cmd):
69 pipe = subprocess.Popen(cmd, shell=True, universal_newlines=True,
70 stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
72 output = "".join(pipe.stdout.readlines())
79 commands.getstatusoutput = dak_getstatusoutput
81 ################################################################################
84 """ Escape html chars """
85 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
87 ################################################################################
89 def open_file(filename, mode='r'):
91 Open C{file}, return fileobject.
93 @type filename: string
94 @param filename: path/filename to open
97 @param mode: open mode
100 @return: open fileobject
102 @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
106 f = open(filename, mode)
108 raise CantOpenError, filename
111 ################################################################################
113 def our_raw_input(prompt=""):
115 sys.stdout.write(prompt)
121 sys.stderr.write("\nUser interrupt (^D).\n")
124 ################################################################################
126 def extract_component_from_section(section):
129 if section.find('/') != -1:
130 component = section.split('/')[0]
132 # Expand default component
134 if Cnf.has_key("Component::%s" % section):
139 return (section, component)
141 ################################################################################
143 def parse_deb822(contents, signing_rules=0):
147 # Split the lines in the input, keeping the linebreaks.
148 lines = contents.splitlines(True)
151 raise ParseChangesError, "[Empty changes file]"
153 # Reindex by line number so we can easily verify the format of
159 indexed_lines[index] = line[:-1]
163 num_of_lines = len(indexed_lines.keys())
166 while index < num_of_lines:
168 line = indexed_lines[index]
170 if signing_rules == 1:
172 if index > num_of_lines:
173 raise InvalidDscError, index
174 line = indexed_lines[index]
175 if not line.startswith("-----BEGIN PGP SIGNATURE"):
176 raise InvalidDscError, index
181 if line.startswith("-----BEGIN PGP SIGNATURE"):
183 if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
185 if signing_rules == 1:
186 while index < num_of_lines and line != "":
188 line = indexed_lines[index]
190 # If we're not inside the signed data, don't process anything
191 if signing_rules >= 0 and not inside_signature:
193 slf = re_single_line_field.match(line)
195 field = slf.groups()[0].lower()
196 changes[field] = slf.groups()[1]
200 changes[field] += '\n'
202 mlf = re_multi_line_field.match(line)
205 raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
206 if first == 1 and changes[field] != "":
207 changes[field] += '\n'
209 changes[field] += mlf.groups()[0] + '\n'
213 if signing_rules == 1 and inside_signature:
214 raise InvalidDscError, index
216 changes["filecontents"] = "".join(lines)
218 if changes.has_key("source"):
219 # Strip the source version in brackets from the source field,
220 # put it in the "source-version" field instead.
221 srcver = re_srchasver.search(changes["source"])
223 changes["source"] = srcver.group(1)
224 changes["source-version"] = srcver.group(2)
227 raise ParseChangesError, error
231 ################################################################################
233 def parse_changes(filename, signing_rules=0):
235 Parses a changes file and returns a dictionary where each field is a
236 key. The mandatory first argument is the filename of the .changes
239 signing_rules is an optional argument:
241 - If signing_rules == -1, no signature is required.
242 - If signing_rules == 0 (the default), a signature is required.
243 - If signing_rules == 1, it turns on the same strict format checking
246 The rules for (signing_rules == 1)-mode are:
248 - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
249 followed by any PGP header data and must end with a blank line.
251 - The data section must end with a blank line and must be followed by
252 "-----BEGIN PGP SIGNATURE-----".
255 changes_in = open_file(filename)
256 content = changes_in.read()
259 unicode(content, 'utf-8')
261 raise ChangesUnicodeError, "Changes file not proper utf-8"
262 return parse_deb822(content, signing_rules)
264 ################################################################################
def hash_key(hashname):
    """Map a hash algorithm name to its files-dict key, e.g. 'sha1' -> 'sha1sum'."""
    return "%s%s" % (hashname, "sum")
269 ################################################################################
271 def create_hash(where, files, hashname, hashfunc):
273 create_hash extends the passed files dict with the given hash by
274 iterating over all files on disk and passing them to the hashing
279 for f in files.keys():
281 file_handle = open_file(f)
282 except CantOpenError:
283 rejmsg.append("Could not open file %s for checksumming" % (f))
286 files[f][hash_key(hashname)] = hashfunc(file_handle)
291 ################################################################################
293 def check_hash(where, files, hashname, hashfunc):
295 check_hash checks the given hash in the files dict against the actual
296 files on disk. The hash values need to be present consistently in
297 all file entries. It does not modify its input in any way.
301 for f in files.keys():
305 file_handle = open_file(f)
307 # Check for the hash entry, to not trigger a KeyError.
308 if not files[f].has_key(hash_key(hashname)):
309 rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
313 # Actually check the hash for correctness.
314 if hashfunc(file_handle) != files[f][hash_key(hashname)]:
315 rejmsg.append("%s: %s check failed in %s" % (f, hashname,
317 except CantOpenError:
318 # TODO: This happens when the file is in the pool.
319 # warn("Cannot open file %s" % f)
326 ################################################################################
328 def check_size(where, files):
330 check_size checks the file sizes in the passed files dict against the
335 for f in files.keys():
340 # TODO: This happens when the file is in the pool.
344 actual_size = entry[stat.ST_SIZE]
345 size = int(files[f]["size"])
346 if size != actual_size:
347 rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
348 % (f, actual_size, size, where))
351 ################################################################################
353 def check_dsc_files(dsc_filename, dsc=None, dsc_files=None):
355 Verify that the files listed in the Files field of the .dsc are
356 those expected given the announced Format.
358 @type dsc_filename: string
359 @param dsc_filename: path of .dsc file
362 @param dsc: the content of the .dsc parsed by C{parse_changes()}
364 @type dsc_files: dict
365 @param dsc_files: the file list returned by C{build_file_list()}
368 @return: all errors detected
372 # Parse the file if needed
374 dsc = parse_changes(dsc_filename, signing_rules=1);
376 if dsc_files is None:
377 dsc_files = build_file_list(dsc, is_a_dsc=1)
379 # Ensure .dsc lists proper set of source files according to the format
381 has = defaultdict(lambda: 0)
384 (r'orig.tar.gz', ('orig_tar_gz', 'orig_tar')),
385 (r'diff.gz', ('debian_diff',)),
386 (r'tar.gz', ('native_tar_gz', 'native_tar')),
387 (r'debian\.tar\.(gz|bz2)', ('debian_tar',)),
388 (r'orig\.tar\.(gz|bz2)', ('orig_tar',)),
389 (r'tar\.(gz|bz2)', ('native_tar',)),
390 (r'orig-.+\.tar\.(gz|bz2)', ('more_orig_tar',)),
393 for f in dsc_files.keys():
394 m = re_issource.match(f)
396 rejmsg.append("%s: %s in Files field not recognised as source."
400 # Populate 'has' dictionary by resolving keys in lookup table
402 for regex, keys in ftype_lookup:
403 if re.match(regex, m.group(3)):
409 # File does not match anything in lookup table; reject
411 reject("%s: unexpected source file '%s'" % (dsc_filename, f))
413 # Check for multiple files
414 for file_type in ('orig_tar', 'native_tar', 'debian_tar', 'debian_diff'):
415 if has[file_type] > 1:
416 rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))
418 # Source format specific tests
419 for format in srcformats:
420 if format.re_format.match(dsc['format']):
422 '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)
428 ################################################################################
430 def check_hash_fields(what, manifest):
432 check_hash_fields ensures that there are no checksum fields in the
433 given dict that we do not know about.
437 hashes = map(lambda x: x[0], known_hashes)
438 for field in manifest:
439 if field.startswith("checksums-"):
440 hashname = field.split("-",1)[1]
441 if hashname not in hashes:
442 rejmsg.append("Unsupported checksum field for %s "\
443 "in %s" % (hashname, what))
446 ################################################################################
448 def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
449 if format >= version:
450 # The version should contain the specified hash.
453 # Import hashes from the changes
454 rejmsg = parse_checksums(".changes", files, changes, hashname)
458 # We need to calculate the hash because it can't possibly
461 return func(".changes", files, hashname, hashfunc)
463 # We could add the orig which might be in the pool to the files dict to
464 # access the checksums easily.
466 def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
468 ensure_dsc_hashes' task is to ensure that each and every *present* hash
469 in the dsc is correct, i.e. identical to the changes file and if necessary
470 the pool. The latter task is delegated to check_hash.
474 if not dsc.has_key('Checksums-%s' % (hashname,)):
476 # Import hashes from the dsc
477 parse_checksums(".dsc", dsc_files, dsc, hashname)
479 rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
482 ################################################################################
484 def parse_checksums(where, files, manifest, hashname):
486 field = 'checksums-%s' % hashname
487 if not field in manifest:
489 for line in manifest[field].split('\n'):
492 clist = line.strip().split(' ')
494 checksum, size, checkfile = clist
496 rejmsg.append("Cannot parse checksum line [%s]" % (line))
498 if not files.has_key(checkfile):
499 # TODO: check for the file's entry in the original files dict, not
500 # the one modified by (auto)byhand and other weird stuff
501 # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
502 # (file, hashname, where))
504 if not files[checkfile]["size"] == size:
505 rejmsg.append("%s: size differs for files and checksums-%s entry "\
506 "in %s" % (checkfile, hashname, where))
508 files[checkfile][hash_key(hashname)] = checksum
509 for f in files.keys():
510 if not files[f].has_key(hash_key(hashname)):
511 rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
515 ################################################################################
517 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
519 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
522 # Make sure we have a Files: field to parse...
523 if not changes.has_key(field):
524 raise NoFilesFieldError
526 # Get SourceFormat object for this Format and validate it
527 format = get_format_from_string(changes.get['format'])
528 format.validate_format(is_a_dsc=is_a_dsc, field=field)
530 includes_section = (not is_a_dsc) and field == "files"
532 # Parse each entry/line:
533 for i in changes[field].split('\n'):
537 section = priority = ""
540 (md5, size, section, priority, name) = s
542 (md5, size, name) = s
544 raise ParseChangesError, i
551 (section, component) = extract_component_from_section(section)
553 files[name] = Dict(size=size, section=section,
554 priority=priority, component=component)
555 files[name][hashname] = md5
559 ################################################################################
561 def send_mail (message, filename=""):
562 """sendmail wrapper, takes _either_ a message string or a file as arguments"""
564 # If we've been passed a string dump it into a temporary file
566 (fd, filename) = tempfile.mkstemp()
567 os.write (fd, message)
570 if Cnf.has_key("Dinstall::MailWhiteList") and \
571 Cnf["Dinstall::MailWhiteList"] != "":
572 message_in = open_file(filename)
573 message_raw = modemail.message_from_file(message_in)
577 whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
579 for line in whitelist_in:
580 if not re_whitespace_comment.match(line):
581 if re_re_mark.match(line):
582 whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
584 whitelist.append(re.compile(re.escape(line.strip())))
589 fields = ["To", "Bcc", "Cc"]
592 value = message_raw.get(field, None)
595 for item in value.split(","):
596 (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
602 if not mail_whitelisted:
603 print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])
607 # Doesn't have any mail in whitelist so remove the header
609 del message_raw[field]
611 message_raw.replace_header(field, string.join(match, ", "))
613 # Change message fields in order if we don't have a To header
614 if not message_raw.has_key("To"):
617 if message_raw.has_key(field):
618 message_raw[fields[-1]] = message_raw[field]
619 del message_raw[field]
622 # Clean up any temporary files
623 # and return, as we removed all recipients.
625 os.unlink (filename);
628 fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
629 os.write (fd, message_raw.as_string(True));
633 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
635 raise SendmailFailedError, output
637 # Clean up any temporary files
641 ################################################################################
643 def poolify (source, component):
646 if source[:3] == "lib":
647 return component + source[:4] + '/' + source + '/'
649 return component + source[:1] + '/' + source + '/'
651 ################################################################################
653 def move (src, dest, overwrite = 0, perms = 0664):
654 if os.path.exists(dest) and os.path.isdir(dest):
657 dest_dir = os.path.dirname(dest)
658 if not os.path.exists(dest_dir):
659 umask = os.umask(00000)
660 os.makedirs(dest_dir, 02775)
662 #print "Moving %s to %s..." % (src, dest)
663 if os.path.exists(dest) and os.path.isdir(dest):
664 dest += '/' + os.path.basename(src)
665 # Don't overwrite unless forced to
666 if os.path.exists(dest):
668 fubar("Can't move %s to %s - file already exists." % (src, dest))
670 if not os.access(dest, os.W_OK):
671 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
672 shutil.copy2(src, dest)
673 os.chmod(dest, perms)
676 def copy (src, dest, overwrite = 0, perms = 0664):
677 if os.path.exists(dest) and os.path.isdir(dest):
680 dest_dir = os.path.dirname(dest)
681 if not os.path.exists(dest_dir):
682 umask = os.umask(00000)
683 os.makedirs(dest_dir, 02775)
685 #print "Copying %s to %s..." % (src, dest)
686 if os.path.exists(dest) and os.path.isdir(dest):
687 dest += '/' + os.path.basename(src)
688 # Don't overwrite unless forced to
689 if os.path.exists(dest):
691 raise FileExistsError
693 if not os.access(dest, os.W_OK):
694 raise CantOverwriteError
695 shutil.copy2(src, dest)
696 os.chmod(dest, perms)
698 ################################################################################
701 res = socket.gethostbyaddr(socket.gethostname())
702 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
703 if database_hostname:
704 return database_hostname
708 def which_conf_file ():
709 res = socket.gethostbyaddr(socket.gethostname())
710 # In case we allow local config files per user, try if one exists
711 if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
712 homedir = os.getenv("HOME")
713 confpath = os.path.join(homedir, "/etc/dak.conf")
714 if os.path.exists(confpath):
715 apt_pkg.ReadConfigFileISC(Cnf,default_config)
717 # We are still in here, so there is no local config file or we do
718 # not allow local files. Do the normal stuff.
719 if Cnf.get("Config::" + res[0] + "::DakConfig"):
720 return Cnf["Config::" + res[0] + "::DakConfig"]
722 return default_config
724 def which_apt_conf_file ():
725 res = socket.gethostbyaddr(socket.gethostname())
726 # In case we allow local config files per user, try if one exists
727 if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
728 homedir = os.getenv("HOME")
729 confpath = os.path.join(homedir, "/etc/dak.conf")
730 if os.path.exists(confpath):
731 apt_pkg.ReadConfigFileISC(Cnf,default_config)
733 if Cnf.get("Config::" + res[0] + "::AptConfig"):
734 return Cnf["Config::" + res[0] + "::AptConfig"]
736 return default_apt_config
738 def which_alias_file():
739 hostname = socket.gethostbyaddr(socket.gethostname())[0]
740 aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
741 if os.path.exists(aliasfn):
746 ################################################################################
748 def TemplateSubst(map, filename):
749 """ Perform a substition of template """
750 templatefile = open_file(filename)
751 template = templatefile.read()
753 template = template.replace(x, str(map[x]))
757 ################################################################################
759 def fubar(msg, exit_code=1):
760 sys.stderr.write("E: %s\n" % (msg))
764 sys.stderr.write("W: %s\n" % (msg))
766 ################################################################################
768 # Returns the user name with a laughable attempt at rfc822 conformancy
769 # (read: removing stray periods).
771 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
774 return pwd.getpwuid(os.getuid())[0]
776 ################################################################################
786 return ("%d%s" % (c, t))
788 ################################################################################
790 def cc_fix_changes (changes):
791 o = changes.get("architecture", "")
793 del changes["architecture"]
794 changes["architecture"] = {}
796 changes["architecture"][j] = 1
798 def changes_compare (a, b):
799 """ Sort by source name, source version, 'have source', and then by filename """
801 a_changes = parse_changes(a)
806 b_changes = parse_changes(b)
810 cc_fix_changes (a_changes)
811 cc_fix_changes (b_changes)
813 # Sort by source name
814 a_source = a_changes.get("source")
815 b_source = b_changes.get("source")
816 q = cmp (a_source, b_source)
820 # Sort by source version
821 a_version = a_changes.get("version", "0")
822 b_version = b_changes.get("version", "0")
823 q = apt_pkg.VersionCompare(a_version, b_version)
827 # Sort by 'have source'
828 a_has_source = a_changes["architecture"].get("source")
829 b_has_source = b_changes["architecture"].get("source")
830 if a_has_source and not b_has_source:
832 elif b_has_source and not a_has_source:
835 # Fall back to sort by filename
838 ################################################################################
840 def find_next_free (dest, too_many=100):
843 while os.path.exists(dest) and extra < too_many:
844 dest = orig_dest + '.' + repr(extra)
846 if extra >= too_many:
847 raise NoFreeFilenameError
850 ################################################################################
852 def result_join (original, sep = '\t'):
854 for i in xrange(len(original)):
855 if original[i] == None:
856 resultlist.append("")
858 resultlist.append(original[i])
859 return sep.join(resultlist)
861 ################################################################################
863 def prefix_multi_line_string(str, prefix, include_blank_lines=0):
865 for line in str.split('\n'):
867 if line or include_blank_lines:
868 out += "%s%s\n" % (prefix, line)
869 # Strip trailing new line
874 ################################################################################
876 def validate_changes_file_arg(filename, require_changes=1):
878 'filename' is either a .changes or .dak file. If 'filename' is a
879 .dak file, it's changed to be the corresponding .changes file. The
880 function then checks if the .changes file a) exists and b) is
881 readable and returns the .changes filename if so. If there's a
882 problem, the next action depends on the option 'require_changes'
885 - If 'require_changes' == -1, errors are ignored and the .changes
886 filename is returned.
887 - If 'require_changes' == 0, a warning is given and 'None' is returned.
888 - If 'require_changes' == 1, a fatal error is raised.
893 orig_filename = filename
894 if filename.endswith(".dak"):
895 filename = filename[:-4]+".changes"
897 if not filename.endswith(".changes"):
898 error = "invalid file type; not a changes file"
900 if not os.access(filename,os.R_OK):
901 if os.path.exists(filename):
902 error = "permission denied"
904 error = "file not found"
907 if require_changes == 1:
908 fubar("%s: %s." % (orig_filename, error))
909 elif require_changes == 0:
910 warn("Skipping %s - %s" % (orig_filename, error))
912 else: # We only care about the .dak file
917 ################################################################################
920 return (arch != "source" and arch != "all")
922 ################################################################################
def join_with_commas_and(list):
    """Render a sequence of strings as English prose: 'a, b and c'.

    An empty sequence yields the literal string "nothing"; a single-element
    sequence yields that element unchanged.
    """
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    head = ", ".join(list[:-1])
    return head + " and " + list[-1]
929 ################################################################################
934 (pkg, version, constraint) = atom
936 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
939 pp_deps.append(pp_dep)
940 return " |".join(pp_deps)
942 ################################################################################
947 ################################################################################
949 def parse_args(Options):
950 """ Handle -a, -c and -s arguments; returns them as SQL constraints """
951 # XXX: This should go away and everything which calls it be converted
952 # to use SQLA properly. For now, we'll just fix it not to use
953 # the old Pg interface though
954 session = DBConn().session()
958 for suitename in split_args(Options["Suite"]):
959 suite = get_suite(suitename, session=session)
960 if suite.suite_id is None:
961 warn("suite '%s' not recognised." % (suite.suite_name))
963 suite_ids_list.append(suite.suite_id)
965 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
967 fubar("No valid suite given.")
972 if Options["Component"]:
973 component_ids_list = []
974 for componentname in split_args(Options["Component"]):
975 component = get_component(componentname, session=session)
976 if component is None:
977 warn("component '%s' not recognised." % (componentname))
979 component_ids_list.append(component.component_id)
980 if component_ids_list:
981 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
983 fubar("No valid component given.")
987 # Process architecture
988 con_architectures = ""
990 if Options["Architecture"]:
992 for archname in split_args(Options["Architecture"]):
993 if archname == "source":
996 arch = get_architecture(archname, session=session)
998 warn("architecture '%s' not recognised." % (archname))
1000 arch_ids_list.append(arch.arch_id)
1002 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
1004 if not check_source:
1005 fubar("No valid architecture given.")
1009 return (con_suites, con_architectures, con_components, check_source)
1011 ################################################################################
1013 # Inspired(tm) by Bryn Keller's print_exc_plus (See
1014 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
1017 tb = sys.exc_info()[2]
1024 frame = frame.f_back
1026 traceback.print_exc()
1028 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
1029 frame.f_code.co_filename,
1031 for key, value in frame.f_locals.items():
1032 print "\t%20s = " % key,
1036 print "<unable to print>"
1038 ################################################################################
1040 def try_with_debug(function):
1048 ################################################################################
1050 def arch_compare_sw (a, b):
1052 Function for use in sorting lists of architectures.
1054 Sorts normally except that 'source' dominates all others.
1057 if a == "source" and b == "source":
1066 ################################################################################
1068 def split_args (s, dwim=1):
1070 Split command line arguments which can be separated by either commas
1071 or whitespace. If dwim is set, it will complain about string ending
1072 in comma since this usually means someone did 'dak ls -a i386, m68k
1073 foo' or something and the inevitable confusion resulting from 'm68k'
1074 being treated as an argument is undesirable.
1077 if s.find(",") == -1:
1080 if s[-1:] == "," and dwim:
1081 fubar("split_args: found trailing comma, spurious space maybe?")
1084 ################################################################################
def Dict(**kwargs):
    """Return the keyword arguments as a plain dictionary (legacy dict-literal helper)."""
    return kwargs
1088 ########################################
1090 def gpgv_get_status_output(cmd, status_read, status_write):
1092 Our very own version of commands.getouputstatus(), hacked to support
1096 cmd = ['/bin/sh', '-c', cmd]
1097 p2cread, p2cwrite = os.pipe()
1098 c2pread, c2pwrite = os.pipe()
1099 errout, errin = os.pipe()
1109 for i in range(3, 256):
1110 if i != status_write:
1116 os.execvp(cmd[0], cmd)
1122 os.dup2(c2pread, c2pwrite)
1123 os.dup2(errout, errin)
1125 output = status = ""
1127 i, o, e = select.select([c2pwrite, errin, status_read], [], [])
1130 r = os.read(fd, 8196)
1132 more_data.append(fd)
1133 if fd == c2pwrite or fd == errin:
1135 elif fd == status_read:
1138 fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
1140 pid, exit_status = os.waitpid(pid, 0)
1142 os.close(status_write)
1143 os.close(status_read)
1153 return output, status, exit_status
1155 ################################################################################
1157 def process_gpgv_output(status):
1158 # Process the status-fd output
1161 for line in status.split('\n'):
1165 split = line.split()
1167 internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
1169 (gnupg, keyword) = split[:2]
1170 if gnupg != "[GNUPG:]":
1171 internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
1174 if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
1175 internal_error += "found duplicate status token ('%s').\n" % (keyword)
1178 keywords[keyword] = args
1180 return (keywords, internal_error)
1182 ################################################################################
1184 def retrieve_key (filename, keyserver=None, keyring=None):
1186 Retrieve the key that signed 'filename' from 'keyserver' and
1187 add it to 'keyring'. Returns nothing on success, or an error message
1191 # Defaults for keyserver and keyring
1193 keyserver = Cnf["Dinstall::KeyServer"]
1195 keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
1197 # Ensure the filename contains no shell meta-characters or other badness
1198 if not re_taint_free.match(filename):
1199 return "%s: tainted filename" % (filename)
1201 # Invoke gpgv on the file
1202 status_read, status_write = os.pipe()
1203 cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1204 (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1206 # Process the status-fd output
1207 (keywords, internal_error) = process_gpgv_output(status)
1209 return internal_error
1211 if not keywords.has_key("NO_PUBKEY"):
1212 return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1214 fingerprint = keywords["NO_PUBKEY"][0]
1215 # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
1216 # it'll try to create a lockfile in /dev. A better solution might
1217 # be a tempfile or something.
1218 cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1219 % (Cnf["Dinstall::SigningKeyring"])
1220 cmd += " --keyring %s --keyserver %s --recv-key %s" \
1221 % (keyring, keyserver, fingerprint)
1222 (result, output) = commands.getstatusoutput(cmd)
1224 return "'%s' failed with exit code %s" % (cmd, result)
1228 ################################################################################
1230 def gpg_keyring_args(keyrings=None):
1232 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1234 return " ".join(["--keyring %s" % x for x in keyrings])
1236 ################################################################################
1238 def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None):
    # Verify the GPG signature on sig_filename (optionally a detached
    # signature over data_filename) by running gpgv and parsing its
    # --status-fd output.  Returns a tuple (fingerprint, rejects): on success
    # rejects is [] and fingerprint is the signing key's fingerprint; on any
    # failure fingerprint is None and rejects is a list of error strings.
    # NOTE(review): the docstring prose below still describes an older
    # reject-callback interface ("The second argument is a reject function")
    # which no longer matches this signature — errors are now accumulated in
    # the 'rejects' list instead.  The prose should be updated.
1240 Check the signature of a file and return the fingerprint if the
1241 signature is valid or 'None' if it's not. The first argument is the
1242 filename whose signature should be checked. The second argument is a
1243 reject function and is called when an error is found. The reject()
1244 function must allow for two arguments: the first is the error message,
1245 the second is an optional prefix string. It's possible for reject()
1246 to be called more than once during an invocation of check_signature().
1247 The third argument is optional and is the name of the files the
1248 detached signature applies to. The fourth argument is optional and is
1249 a *list* of keyrings to use. 'autofetch' can either be None, True or
1250 False. If None, the default behaviour specified in the config will be
1256 # Ensure the filename contains no shell meta-characters or other badness
    # Both filenames are interpolated into a shell command line below, so
    # refuse anything that fails the taint regex outright.
1257 if not re_taint_free.match(sig_filename):
1258 rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
1259 return (None, rejects)
1261 if data_filename and not re_taint_free.match(data_filename):
1262 rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
1263 return (None, rejects)
    # Presumably only reached when the caller passed no keyrings — as shown
    # this overrides the 'keyrings' argument with the configured default;
    # the guard line is not visible here, verify against the full file.
1266 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1268 # Autofetch the signing key if that's enabled
1269 if autofetch == None:
1270 autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    # retrieve_key returns an error string on failure, falsy on success.
1272 error_msg = retrieve_key(sig_filename)
1274 rejects.append(error_msg)
1275 return (None, rejects)
1277 # Build the command line
    # gpgv writes machine-readable status lines to the fd given via
    # --status-fd; we hand it the write end of this pipe.
1278 status_read, status_write = os.pipe()
1279 cmd = "gpgv --status-fd %s %s %s %s" % (
1280 status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)
1282 # Invoke gpgv on the file
1283 (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)
1285 # Process the status-fd output
    # keywords: dict mapping status tokens (e.g. VALIDSIG) to their argument
    # lists; internal_error: parse-failure description or empty.
1286 (keywords, internal_error) = process_gpgv_output(status)
1288 # If we failed to parse the status-fd output, let's just whine and bail now
    # NOTE(review): list.append() takes exactly one argument — the
    # two-argument append calls below would raise TypeError if this branch is
    # reached.  They look like leftovers from the old reject(msg, prefix)
    # callback interface and should become single-argument appends.
1290 rejects.append("internal error while performing signature check on %s." % (sig_filename))
1291 rejects.append(internal_error, "")
1292 rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.", "")
1293 return (None, rejects)
1295 # Now check for obviously bad things in the processed output
1296 if keywords.has_key("KEYREVOKED"):
1297 rejects.append("The key used to sign %s has been revoked." % (sig_filename))
1298 if keywords.has_key("BADSIG"):
1299 rejects.append("bad signature on %s." % (sig_filename))
    # ERRSIG without NO_PUBKEY means gpgv hit some error other than a
    # missing key; the missing-key case gets its own message below.
1300 if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
1301 rejects.append("failed to check signature on %s." % (sig_filename))
1302 if keywords.has_key("NO_PUBKEY"):
1303 args = keywords["NO_PUBKEY"]
    # 'key' is presumably extracted from args (extraction lines elided).
1306 rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
1307 if keywords.has_key("BADARMOR"):
1308 rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
1309 if keywords.has_key("NODATA"):
1310 rejects.append("no signature found in %s." % (sig_filename))
1311 if keywords.has_key("EXPKEYSIG"):
1312 args = keywords["EXPKEYSIG"]
1315 rejects.append("Signature made by expired key 0x%s" % (key))
    # KEYEXPIRED alongside GOODSIG is tolerated; only reject when the
    # signature itself was not good.
1316 if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
1317 args = keywords["KEYEXPIRED"]
    # The expiry timestamp may be a raw epoch value or an ISO-8601 string
    # (distinguished by the presence of "T"); the surrounding try/except is
    # elided here — on conversion failure the "unknown (...)" form is used.
1321 if timestamp.count("T") == 0:
1323 expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
1325 expiredate = "unknown (%s)" % (timestamp)
1327 expiredate = timestamp
1328 rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))
    # Any of the hard failures above is fatal: bail before the exit-status
    # and sanity checks.
1330 if len(rejects) > 0:
1331 return (None, rejects)
1333 # Next check gpgv exited with a zero return code
    # NOTE(review): same two-argument append problem as above — these calls
    # would raise TypeError if this branch is reached.
1335 rejects.append("gpgv failed while checking %s." % (sig_filename))
1337 rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
1339 rejects.append(prefix_multi_line_string(output, " [GPG output:] "), "")
1340 return (None, rejects)
1342 # Sanity check the good stuff we expect
1343 if not keywords.has_key("VALIDSIG"):
1344 rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
    # VALIDSIG's first argument is the fingerprint of the signing key.
1346 args = keywords["VALIDSIG"]
1348 rejects.append("internal error while checking signature on %s." % (sig_filename))
1350 fingerprint = args[0]
1351 if not keywords.has_key("GOODSIG"):
1352 rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
1353 if not keywords.has_key("SIG_ID"):
1354 rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))
1356 # Finally ensure there's not something we don't recognise
    # Dict is a keyword-arg dict constructor; the values are unused — only
    # membership of the status token matters.
1357 known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
1358 SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
1359 NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")
1361 for keyword in keywords.keys():
1362 if not known_keywords.has_key(keyword):
1363 rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
1365 if len(rejects) > 0:
1366 return (None, rejects)
    # All checks passed: signature is valid.
1368 return (fingerprint, [])
1370 ################################################################################
1372 def gpg_get_key_addresses(fingerprint):
1373 """Retrieve email addresses from gpg key uids for a given fingerprint."""
    # Results are memoised per fingerprint in the module-level
    # key_uid_email_cache dict.
1374 addresses = key_uid_email_cache.get(fingerprint)
    # Cache hit: the early return is elided in this view — presumably
    # "return addresses" follows; a fresh set() is created for the miss path.
1375 if addresses != None:
1378 cmd = "gpg --no-default-keyring %s --fingerprint %s" \
1379 % (gpg_keyring_args(), fingerprint)
1380 (result, output) = commands.getstatusoutput(cmd)
    # Scan gpg's human-readable output for uid lines and collect the email
    # address captured by re_gpg_uid's first group.
1382 for l in output.split('\n'):
1383 m = re_gpg_uid.match(l)
1385 addresses.add(m.group(1))
    # Store in the cache; the final "return addresses" is elided here.
1386 key_uid_email_cache[fingerprint] = addresses
1389 ################################################################################
1391 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
1393 def wrap(paragraph, max_length, prefix=""):
    # Word-wrap 'paragraph' to at most 'max_length' characters per line,
    # prefixing every continuation line with 'prefix'.  Several lines
    # (initialisation of the accumulator 's' and current 'line', the word
    # loop header, and the final return) are elided in this view.
1397 words = paragraph.split()
1400 word_size = len(word)
    # A single word longer than max_length gets flushed onto its own line
    # rather than being split mid-word.
1401 if word_size > max_length:
1403 s += line + '\n' + prefix
1404 s += word + '\n' + prefix
    # +1 accounts for the separating space before the candidate word.
1407 new_length = len(line) + word_size + 1
1408 if new_length > max_length:
1409 s += line + '\n' + prefix
1422 ################################################################################
1424 def clean_symlink (src, dest, root):
1426 Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
    # Strip the leading 'root' from both paths.  NOTE(review): plain string
    # replace(…, 1) — assumes both paths start with 'root' and that 'root'
    # does not occur earlier in the path for any other reason.
1429 src = src.replace(root, '', 1)
1430 dest = dest.replace(root, '', 1)
    # Only the directory depth of the link location matters for the number
    # of "../" components needed to climb back to 'root'.
1431 dest = os.path.dirname(dest)
1432 new_src = '../' * len(dest.split('/'))
    # Return the relativised target path for the symlink.
1433 return new_src + src
1435 ################################################################################
1437 def temp_filename(directory=None, prefix="dak", suffix=""):
1439 Return a secure and unique filename by pre-creating it.
1440 If 'directory' is non-null, it will be the directory the file is pre-created in.
1441 If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
1442 If 'suffix' is non-null, the filename will end with it.
1444 Returns a pair (fd, name).
    # Thin wrapper around tempfile.mkstemp; note mkstemp's argument order is
    # (suffix, prefix, dir).  The caller owns the returned OS-level fd and
    # is responsible for closing it and unlinking the file.
1447 return tempfile.mkstemp(suffix, prefix, directory)
1449 ################################################################################
1451 def temp_dirname(parent=None, prefix="dak", suffix=""):
1453 Return a secure and unique directory by pre-creating it.
1454 If 'parent' is non-null, it will be the directory the directory is pre-created in.
1455 If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
1456 If 'suffix' is non-null, the filename will end with it.
1458 Returns a pathname to the new directory
    # Thin wrapper around tempfile.mkdtemp (argument order: suffix, prefix,
    # dir).  The caller is responsible for removing the directory when done.
1461 return tempfile.mkdtemp(suffix, prefix, parent)
1463 ################################################################################
1465 def is_email_alias(email):
1466 """ checks if the user part of the email is listed in the alias file """
    # The module-level alias_cache (see file header) is built lazily on the
    # first call; the "global alias_cache" declaration and the set()
    # initialisation are elided in this view — verify against the full file.
1468 if alias_cache == None:
1469 aliasfn = which_alias_file()
    # Alias file uses the classic "name: targets" format; we only need the
    # name part before the colon.
    # NOTE(review): the file handle from open() is never explicitly closed —
    # relies on refcount finalisation.
1472 for l in open(aliasfn):
1473 alias_cache.add(l.split(':')[0])
    # Compare only the local part (before '@') of the address.
1474 uid = email.split('@')[0]
1475 return uid in alias_cache
1477 ################################################################################
1479 def get_changes_files(dir):
1481 Takes a directory and lists all .changes files in it (as well as chdir'ing
1482 to the directory; this is due to broken behaviour on the part of p-u/p-a
1483 when you're not in the right place)
1485 Returns a list of filenames
1488 # Much of the rest of p-u/p-a depends on being in the right place
    # NOTE(review): the os.chdir(dir) call and the surrounding try/except
    # (binding 'e') are elided in this view — this function deliberately
    # changes the process working directory as a side effect.
1490 changes_files = [x for x in os.listdir(dir) if x.endswith('.changes')]
    # fubar() reports the error and terminates the program.
1492 fubar("Failed to read list from directory %s (%s)" % (dir, e))
1494 return changes_files
1496 ################################################################################
# Module-level bootstrap: load the dak configuration into the global 'Cnf'
# used throughout this module (default_config is defined near the top of the
# file).  Runs at import time.
1500 Cnf = apt_pkg.newConfiguration()
1501 apt_pkg.ReadConfigFileISC(Cnf,default_config)
# Overlay the host-specific configuration when which_conf_file() points
# somewhere other than the default (e.g. via environment override).
1503 if which_conf_file() != default_config:
1504 apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1506 ###############################################################################