2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
40 import email as modemail
43 from dbconn import DBConn, get_architecture, get_component, get_suite
44 from dak_exceptions import *
45 from textutils import fix_maintainer
46 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
47 re_multi_line_field, re_srchasver, re_taint_free, \
48 re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource
50 from srcformats import srcformats
51 from collections import defaultdict
53 ################################################################################
# Site-wide configuration file locations consulted at startup.
default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used

# Process-lifetime caches; populated lazily by the helpers that use them.
alias_cache = None #: Cache for email alias checks
key_uid_email_cache = {} #: Cache for email addresses from gpg key uids

# (hashname, function, earliest_changes_version)
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
                ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
65 # Monkeypatch commands.getstatusoutput as it returns a "0" exit code in
66 # all situations under lenny's Python.
68 def dak_getstatusoutput(cmd):
69 pipe = subprocess.Popen(cmd, shell=True, universal_newlines=True,
70 stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
72 output = "".join(pipe.stdout.readlines())
79 commands.getstatusoutput = dak_getstatusoutput
81 ################################################################################
84 """ Escape html chars """
85 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
87 ################################################################################
89 def open_file(filename, mode='r'):
91 Open C{file}, return fileobject.
93 @type filename: string
94 @param filename: path/filename to open
97 @param mode: open mode
100 @return: open fileobject
102 @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
106 f = open(filename, mode)
108 raise CantOpenError, filename
111 ################################################################################
113 def our_raw_input(prompt=""):
115 sys.stdout.write(prompt)
121 sys.stderr.write("\nUser interrupt (^D).\n")
124 ################################################################################
126 def extract_component_from_section(section):
129 if section.find('/') != -1:
130 component = section.split('/')[0]
132 # Expand default component
134 if Cnf.has_key("Component::%s" % section):
139 return (section, component)
141 ################################################################################
143 def parse_deb822(contents, signing_rules=0):
147 # Split the lines in the input, keeping the linebreaks.
148 lines = contents.splitlines(True)
151 raise ParseChangesError, "[Empty changes file]"
153 # Reindex by line number so we can easily verify the format of
159 indexed_lines[index] = line[:-1]
163 num_of_lines = len(indexed_lines.keys())
166 while index < num_of_lines:
168 line = indexed_lines[index]
170 if signing_rules == 1:
172 if index > num_of_lines:
173 raise InvalidDscError, index
174 line = indexed_lines[index]
175 if not line.startswith("-----BEGIN PGP SIGNATURE"):
176 raise InvalidDscError, index
181 if line.startswith("-----BEGIN PGP SIGNATURE"):
183 if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
185 if signing_rules == 1:
186 while index < num_of_lines and line != "":
188 line = indexed_lines[index]
190 # If we're not inside the signed data, don't process anything
191 if signing_rules >= 0 and not inside_signature:
193 slf = re_single_line_field.match(line)
195 field = slf.groups()[0].lower()
196 changes[field] = slf.groups()[1]
200 changes[field] += '\n'
202 mlf = re_multi_line_field.match(line)
205 raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
206 if first == 1 and changes[field] != "":
207 changes[field] += '\n'
209 changes[field] += mlf.groups()[0] + '\n'
213 if signing_rules == 1 and inside_signature:
214 raise InvalidDscError, index
216 changes["filecontents"] = "".join(lines)
218 if changes.has_key("source"):
219 # Strip the source version in brackets from the source field,
220 # put it in the "source-version" field instead.
221 srcver = re_srchasver.search(changes["source"])
223 changes["source"] = srcver.group(1)
224 changes["source-version"] = srcver.group(2)
227 raise ParseChangesError, error
231 ################################################################################
233 def parse_changes(filename, signing_rules=0):
235 Parses a changes file and returns a dictionary where each field is a
236 key. The mandatory first argument is the filename of the .changes
239 signing_rules is an optional argument:
241 - If signing_rules == -1, no signature is required.
242 - If signing_rules == 0 (the default), a signature is required.
243 - If signing_rules == 1, it turns on the same strict format checking
246 The rules for (signing_rules == 1)-mode are:
248 - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
249 followed by any PGP header data and must end with a blank line.
251 - The data section must end with a blank line and must be followed by
252 "-----BEGIN PGP SIGNATURE-----".
255 changes_in = open_file(filename)
256 content = changes_in.read()
259 unicode(content, 'utf-8')
261 raise ChangesUnicodeError, "Changes file not proper utf-8"
262 return parse_deb822(content, signing_rules)
264 ################################################################################
def hash_key(hashname):
    """Return the files-dict key a checksum is stored under, e.g. "sha1" -> "sha1sum"."""
    return hashname + 'sum'
269 ################################################################################
271 def create_hash(where, files, hashname, hashfunc):
273 create_hash extends the passed files dict with the given hash by
274 iterating over all files on disk and passing them to the hashing
279 for f in files.keys():
281 file_handle = open_file(f)
282 except CantOpenError:
283 rejmsg.append("Could not open file %s for checksumming" % (f))
286 files[f][hash_key(hashname)] = hashfunc(file_handle)
291 ################################################################################
293 def check_hash(where, files, hashname, hashfunc):
295 check_hash checks the given hash in the files dict against the actual
296 files on disk. The hash values need to be present consistently in
297 all file entries. It does not modify its input in any way.
301 for f in files.keys():
305 file_handle = open_file(f)
307 # Check for the hash entry, to not trigger a KeyError.
308 if not files[f].has_key(hash_key(hashname)):
309 rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
313 # Actually check the hash for correctness.
314 if hashfunc(file_handle) != files[f][hash_key(hashname)]:
315 rejmsg.append("%s: %s check failed in %s" % (f, hashname,
317 except CantOpenError:
318 # TODO: This happens when the file is in the pool.
319 # warn("Cannot open file %s" % f)
326 ################################################################################
328 def check_size(where, files):
330 check_size checks the file sizes in the passed files dict against the
335 for f in files.keys():
340 # TODO: This happens when the file is in the pool.
344 actual_size = entry[stat.ST_SIZE]
345 size = int(files[f]["size"])
346 if size != actual_size:
347 rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
348 % (f, actual_size, size, where))
351 ################################################################################
353 def check_dsc_files(dsc_filename, dsc=None, dsc_files=None):
355 Verify that the files listed in the Files field of the .dsc are
356 those expected given the announced Format.
358 @type dsc_filename: string
359 @param dsc_filename: path of .dsc file
362 @param dsc: the content of the .dsc parsed by C{parse_changes()}
364 @type dsc_files: dict
365 @param dsc_files: the file list returned by C{build_file_list()}
368 @return: all errors detected
372 # Parse the file if needed
374 dsc = parse_changes(dsc_filename, signing_rules=1);
376 if dsc_files is None:
377 dsc_files = build_file_list(dsc, is_a_dsc=1)
379 # Ensure .dsc lists proper set of source files according to the format
381 has = defaultdict(lambda: 0)
384 (r'orig.tar.gz', ('orig_tar_gz', 'orig_tar')),
385 (r'diff.gz', ('debian_diff',)),
386 (r'tar.gz', ('native_tar_gz', 'native_tar')),
387 (r'debian\.tar\.(gz|bz2)', ('debian_tar',)),
388 (r'orig\.tar\.(gz|bz2)', ('orig_tar',)),
389 (r'tar\.(gz|bz2)', ('native_tar',)),
390 (r'orig-.+\.tar\.(gz|bz2)', ('more_orig_tar',)),
393 for f in dsc_files.keys():
394 m = re_issource.match(f)
396 rejmsg.append("%s: %s in Files field not recognised as source."
400 # Populate 'has' dictionary by resolving keys in lookup table
402 for regex, keys in ftype_lookup:
403 if re.match(regex, m.group(3)):
409 # File does not match anything in lookup table; reject
411 reject("%s: unexpected source file '%s'" % (dsc_filename, f))
413 # Check for multiple files
414 for file_type in ('orig_tar', 'native_tar', 'debian_tar', 'debian_diff'):
415 if has[file_type] > 1:
416 rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))
418 # Source format specific tests
419 for format in srcformats:
420 if format.re_format.match(dsc['format']):
422 '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)
428 ################################################################################
def check_hash_fields(what, manifest):
    """
    check_hash_fields ensures that there are no checksum fields in the
    given dict that we do not know about.

    @type what: string
    @param what: name of the manifest being checked (used in messages)
    @type manifest: dict
    @param manifest: parsed control-style mapping whose fields are inspected
    @rtype: list
    @return: reject messages, empty when every checksum field is known
    """
    rejmsg = []
    # Use a real list (not a map iterator) so the membership test below
    # works correctly on every loop iteration.
    hashes = [x[0] for x in known_hashes]
    for field in manifest:
        if field.startswith("checksums-"):
            hashname = field.split("-", 1)[1]
            if hashname not in hashes:
                rejmsg.append("Unsupported checksum field for %s "
                              "in %s" % (hashname, what))
    return rejmsg
446 ################################################################################
448 def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
449 if format >= version:
450 # The version should contain the specified hash.
453 # Import hashes from the changes
454 rejmsg = parse_checksums(".changes", files, changes, hashname)
458 # We need to calculate the hash because it can't possibly
461 return func(".changes", files, hashname, hashfunc)
463 # We could add the orig which might be in the pool to the files dict to
464 # access the checksums easily.
466 def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
468 ensure_dsc_hashes' task is to ensure that each and every *present* hash
469 in the dsc is correct, i.e. identical to the changes file and if necessary
470 the pool. The latter task is delegated to check_hash.
474 if not dsc.has_key('Checksums-%s' % (hashname,)):
476 # Import hashes from the dsc
477 parse_checksums(".dsc", dsc_files, dsc, hashname)
479 rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
482 ################################################################################
def parse_checksums(where, files, manifest, hashname):
    """
    Validate and import a Checksums-<hashname> field.

    Each entry has the form "<checksum> <size> <filename>".  The size is
    compared against the file's entry in 'files' and, on success, the
    checksum is stored into files[filename] under hash_key(hashname).
    Finally every file in 'files' must have received a checksum entry.

    @param where: manifest name (".changes"/".dsc") used in reject messages
    @param files: dict of file entries; modified in place
    @param manifest: parsed control data containing the checksums field
    @param hashname: hash name, e.g. "sha1"
    @return: list of reject messages (empty when everything checked out)
    """
    rejmsg = []
    field = 'checksums-%s' % hashname
    if not field in manifest:
        return rejmsg
    for line in manifest[field].split('\n'):
        if not line:
            break
        clist = line.strip().split(' ')
        if len(clist) == 3:
            checksum, size, checkfile = clist
        else:
            rejmsg.append("Cannot parse checksum line [%s]" % (line))
            continue
        if not files.has_key(checkfile):
            # TODO: check for the file's entry in the original files dict, not
            # the one modified by (auto)byhand and other weird stuff
            # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
            #     (file, hashname, where))
            continue
        # NOTE: sizes are compared as strings; both presumably come straight
        # from control data — confirm 'files' stores sizes unconverted.
        if not files[checkfile]["size"] == size:
            rejmsg.append("%s: size differs for files and checksums-%s entry "\
                "in %s" % (checkfile, hashname, where))
            continue
        files[checkfile][hash_key(hashname)] = checksum
    for f in files.keys():
        if not files[f].has_key(hash_key(hashname)):
            # Bug fix: name the file that is missing its checksum (f), not
            # the last filename parsed from the checksums field (checkfile).
            rejmsg.append("%s: no entry in checksums-%s in %s" % (f,
                hashname, where))
    return rejmsg
517 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
519 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
522 # Make sure we have a Files: field to parse...
523 if not changes.has_key(field):
524 raise NoFilesFieldError
526 format = SourceFormat.parse_format(changes.get["format"], field, is_a_dsc)
528 includes_section = (not is_a_dsc) and field == "files"
530 # Parse each entry/line:
531 for i in changes[field].split('\n'):
535 section = priority = ""
538 (md5, size, section, priority, name) = s
540 (md5, size, name) = s
542 raise ParseChangesError, i
549 (section, component) = extract_component_from_section(section)
551 files[name] = Dict(size=size, section=section,
552 priority=priority, component=component)
553 files[name][hashname] = md5
557 ################################################################################
559 def send_mail (message, filename=""):
560 """sendmail wrapper, takes _either_ a message string or a file as arguments"""
562 # If we've been passed a string dump it into a temporary file
564 (fd, filename) = tempfile.mkstemp()
565 os.write (fd, message)
568 if Cnf.has_key("Dinstall::MailWhiteList") and \
569 Cnf["Dinstall::MailWhiteList"] != "":
570 message_in = open_file(filename)
571 message_raw = modemail.message_from_file(message_in)
575 whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
577 for line in whitelist_in:
578 if not re_whitespace_comment.match(line):
579 if re_re_mark.match(line):
580 whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
582 whitelist.append(re.compile(re.escape(line.strip())))
587 fields = ["To", "Bcc", "Cc"]
590 value = message_raw.get(field, None)
593 for item in value.split(","):
594 (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
600 if not mail_whitelisted:
601 print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])
605 # Doesn't have any mail in whitelist so remove the header
607 del message_raw[field]
609 message_raw.replace_header(field, string.join(match, ", "))
611 # Change message fields in order if we don't have a To header
612 if not message_raw.has_key("To"):
615 if message_raw.has_key(field):
616 message_raw[fields[-1]] = message_raw[field]
617 del message_raw[field]
620 # Clean up any temporary files
621 # and return, as we removed all recipients.
623 os.unlink (filename);
626 fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
627 os.write (fd, message_raw.as_string(True));
631 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
633 raise SendmailFailedError, output
635 # Clean up any temporary files
639 ################################################################################
def poolify (source, component):
    """
    Return the pool subdirectory for a source package, e.g.
    poolify("acme", "main") -> "main/a/acme/" and
    poolify("libacme", "main") -> "main/liba/libacme/".

    @param source: source package name
    @param component: archive component; may be empty
    @return: relative pool path, always ending with '/'
    """
    if component:
        component += '/'
    # Sources named lib* are bucketed under lib?/ to keep the l/ directory
    # from growing huge (Debian pool layout convention).
    if source[:3] == "lib":
        return component + source[:4] + '/' + source + '/'
    else:
        return component + source[:1] + '/' + source + '/'
649 ################################################################################
651 def move (src, dest, overwrite = 0, perms = 0664):
652 if os.path.exists(dest) and os.path.isdir(dest):
655 dest_dir = os.path.dirname(dest)
656 if not os.path.exists(dest_dir):
657 umask = os.umask(00000)
658 os.makedirs(dest_dir, 02775)
660 #print "Moving %s to %s..." % (src, dest)
661 if os.path.exists(dest) and os.path.isdir(dest):
662 dest += '/' + os.path.basename(src)
663 # Don't overwrite unless forced to
664 if os.path.exists(dest):
666 fubar("Can't move %s to %s - file already exists." % (src, dest))
668 if not os.access(dest, os.W_OK):
669 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
670 shutil.copy2(src, dest)
671 os.chmod(dest, perms)
674 def copy (src, dest, overwrite = 0, perms = 0664):
675 if os.path.exists(dest) and os.path.isdir(dest):
678 dest_dir = os.path.dirname(dest)
679 if not os.path.exists(dest_dir):
680 umask = os.umask(00000)
681 os.makedirs(dest_dir, 02775)
683 #print "Copying %s to %s..." % (src, dest)
684 if os.path.exists(dest) and os.path.isdir(dest):
685 dest += '/' + os.path.basename(src)
686 # Don't overwrite unless forced to
687 if os.path.exists(dest):
689 raise FileExistsError
691 if not os.access(dest, os.W_OK):
692 raise CantOverwriteError
693 shutil.copy2(src, dest)
694 os.chmod(dest, perms)
696 ################################################################################
699 res = socket.gethostbyaddr(socket.gethostname())
700 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
701 if database_hostname:
702 return database_hostname
def which_conf_file ():
    """Return the path of the dak configuration file to use on this host."""
    # Canonical hostname drives the per-host Config::<host>:: lookups.
    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # NOTE(review): the second argument is absolute, so os.path.join
        # discards 'homedir' and confpath is always "/etc/dak.conf" —
        # presumably a path under $HOME was intended; confirm before relying.
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            # NOTE(review): this loads default_config rather than confpath —
            # looks suspicious; verify the intended file is being read.
            apt_pkg.ReadConfigFileISC(Cnf,default_config)
    # We are still in here, so there is no local config file or we do
    # not allow local files. Do the normal stuff.
    if Cnf.get("Config::" + res[0] + "::DakConfig"):
        # A host-specific config file is configured for this machine.
        return Cnf["Config::" + res[0] + "::DakConfig"]
    return default_config
def which_apt_conf_file ():
    """Return the path of the apt configuration file to use on this host."""
    # Canonical hostname drives the per-host Config::<host>:: lookups.
    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # NOTE(review): the second argument is absolute, so os.path.join
        # discards 'homedir' and confpath is always "/etc/dak.conf" —
        # presumably a path under $HOME was intended; confirm before relying.
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            # NOTE(review): this loads default_config rather than confpath —
            # looks suspicious; verify the intended file is being read.
            apt_pkg.ReadConfigFileISC(Cnf,default_config)
    if Cnf.get("Config::" + res[0] + "::AptConfig"):
        # A host-specific apt config file is configured for this machine.
        return Cnf["Config::" + res[0] + "::AptConfig"]
    return default_apt_config
736 def which_alias_file():
737 hostname = socket.gethostbyaddr(socket.gethostname())[0]
738 aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
739 if os.path.exists(aliasfn):
744 ################################################################################
746 def TemplateSubst(map, filename):
747 """ Perform a substition of template """
748 templatefile = open_file(filename)
749 template = templatefile.read()
751 template = template.replace(x, str(map[x]))
755 ################################################################################
757 def fubar(msg, exit_code=1):
758 sys.stderr.write("E: %s\n" % (msg))
762 sys.stderr.write("W: %s\n" % (msg))
764 ################################################################################
766 # Returns the user name with a laughable attempt at rfc822 conformancy
767 # (read: removing stray periods).
769 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
772 return pwd.getpwuid(os.getuid())[0]
774 ################################################################################
784 return ("%d%s" % (c, t))
786 ################################################################################
788 def cc_fix_changes (changes):
789 o = changes.get("architecture", "")
791 del changes["architecture"]
792 changes["architecture"] = {}
794 changes["architecture"][j] = 1
796 def changes_compare (a, b):
797 """ Sort by source name, source version, 'have source', and then by filename """
799 a_changes = parse_changes(a)
804 b_changes = parse_changes(b)
808 cc_fix_changes (a_changes)
809 cc_fix_changes (b_changes)
811 # Sort by source name
812 a_source = a_changes.get("source")
813 b_source = b_changes.get("source")
814 q = cmp (a_source, b_source)
818 # Sort by source version
819 a_version = a_changes.get("version", "0")
820 b_version = b_changes.get("version", "0")
821 q = apt_pkg.VersionCompare(a_version, b_version)
825 # Sort by 'have source'
826 a_has_source = a_changes["architecture"].get("source")
827 b_has_source = b_changes["architecture"].get("source")
828 if a_has_source and not b_has_source:
830 elif b_has_source and not a_has_source:
833 # Fall back to sort by filename
836 ################################################################################
def find_next_free (dest, too_many=100):
    """
    Return 'dest' if nothing exists at that path, otherwise the first of
    'dest.0', 'dest.1', ... that does not exist yet.

    @param dest: desired filename
    @param too_many: maximum number of numbered variants to try
    @raise NoFreeFilenameError: when no free name is found within the limit
    @return: a filename that does not currently exist
    """
    extra = 0
    orig_dest = dest
    while os.path.exists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
        extra += 1
    if extra >= too_many:
        raise NoFreeFilenameError
    return dest
848 ################################################################################
def result_join (original, sep = '\t'):
    """
    Join a sequence of values into a single string, rendering None
    entries as the empty string.

    @param original: sequence of strings and/or None values
    @param sep: separator placed between entries
    @return: joined string
    """
    resultlist = []
    # Iterate directly instead of indexing; use 'is None' for the
    # identity check (equality with None is the wrong idiom).
    for item in original:
        if item is None:
            resultlist.append("")
        else:
            resultlist.append(item)
    return sep.join(resultlist)
859 ################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    """
    Prefix each line of 'str' with 'prefix', stripping surrounding
    whitespace from every line.  Blank lines are dropped unless
    include_blank_lines is set.

    @param str: input text (possibly multi-line)
    @param prefix: string prepended to every emitted line
    @param include_blank_lines: keep (prefixed) blank lines when true
    @return: the prefixed text, with no trailing newline
    """
    # Collect pieces and join once instead of quadratic string +=;
    # joining also avoids the manual trailing-newline strip.
    pieces = []
    for line in str.split('\n'):
        line = line.strip()
        if line or include_blank_lines:
            pieces.append("%s%s" % (prefix, line))
    return "\n".join(pieces)
872 ################################################################################
874 def validate_changes_file_arg(filename, require_changes=1):
876 'filename' is either a .changes or .dak file. If 'filename' is a
877 .dak file, it's changed to be the corresponding .changes file. The
878 function then checks if the .changes file a) exists and b) is
879 readable and returns the .changes filename if so. If there's a
880 problem, the next action depends on the option 'require_changes'
883 - If 'require_changes' == -1, errors are ignored and the .changes
884 filename is returned.
885 - If 'require_changes' == 0, a warning is given and 'None' is returned.
886 - If 'require_changes' == 1, a fatal error is raised.
891 orig_filename = filename
892 if filename.endswith(".dak"):
893 filename = filename[:-4]+".changes"
895 if not filename.endswith(".changes"):
896 error = "invalid file type; not a changes file"
898 if not os.access(filename,os.R_OK):
899 if os.path.exists(filename):
900 error = "permission denied"
902 error = "file not found"
905 if require_changes == 1:
906 fubar("%s: %s." % (orig_filename, error))
907 elif require_changes == 0:
908 warn("Skipping %s - %s" % (orig_filename, error))
910 else: # We only care about the .dak file
915 ################################################################################
918 return (arch != "source" and arch != "all")
920 ################################################################################
def join_with_commas_and(list):
    """Render a list as English prose: "nothing", "a", "a and b", "a, b and c"."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    return "%s and %s" % (", ".join(list[:-1]), list[-1])
927 ################################################################################
932 (pkg, version, constraint) = atom
934 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
937 pp_deps.append(pp_dep)
938 return " |".join(pp_deps)
940 ################################################################################
945 ################################################################################
947 def parse_args(Options):
948 """ Handle -a, -c and -s arguments; returns them as SQL constraints """
949 # XXX: This should go away and everything which calls it be converted
950 # to use SQLA properly. For now, we'll just fix it not to use
951 # the old Pg interface though
952 session = DBConn().session()
956 for suitename in split_args(Options["Suite"]):
957 suite = get_suite(suitename, session=session)
958 if suite.suite_id is None:
959 warn("suite '%s' not recognised." % (suite.suite_name))
961 suite_ids_list.append(suite.suite_id)
963 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
965 fubar("No valid suite given.")
970 if Options["Component"]:
971 component_ids_list = []
972 for componentname in split_args(Options["Component"]):
973 component = get_component(componentname, session=session)
974 if component is None:
975 warn("component '%s' not recognised." % (componentname))
977 component_ids_list.append(component.component_id)
978 if component_ids_list:
979 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
981 fubar("No valid component given.")
985 # Process architecture
986 con_architectures = ""
988 if Options["Architecture"]:
990 for archname in split_args(Options["Architecture"]):
991 if archname == "source":
994 arch = get_architecture(archname, session=session)
996 warn("architecture '%s' not recognised." % (archname))
998 arch_ids_list.append(arch.arch_id)
1000 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
1002 if not check_source:
1003 fubar("No valid architecture given.")
1007 return (con_suites, con_architectures, con_components, check_source)
1009 ################################################################################
1011 # Inspired(tm) by Bryn Keller's print_exc_plus (See
1012 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
1015 tb = sys.exc_info()[2]
1022 frame = frame.f_back
1024 traceback.print_exc()
1026 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
1027 frame.f_code.co_filename,
1029 for key, value in frame.f_locals.items():
1030 print "\t%20s = " % key,
1034 print "<unable to print>"
1036 ################################################################################
1038 def try_with_debug(function):
1046 ################################################################################
1048 def arch_compare_sw (a, b):
1050 Function for use in sorting lists of architectures.
1052 Sorts normally except that 'source' dominates all others.
1055 if a == "source" and b == "source":
1064 ################################################################################
def split_args (s, dwim=1):
    """
    Split command line arguments which can be separated by either commas
    or whitespace.  If dwim is set, it will complain about a string ending
    in comma since this usually means someone did 'dak ls -a i386, m68k
    foo' or something and the inevitable confusion resulting from 'm68k'
    being treated as an argument is undesirable.

    @param s: argument string to split
    @param dwim: complain (fatally, via fubar) about trailing commas
    @return: list of argument strings
    """
    if s.find(",") == -1:
        # No commas at all: plain whitespace-separated arguments.
        return s.split()
    else:
        if s[-1:] == "," and dwim:
            fubar("split_args: found trailing comma, spurious space maybe?")
        return s.split(",")
1082 ################################################################################
def Dict(**kwargs):
    """Shorthand constructor: return the given keyword arguments as a plain dict."""
    return kwargs
1086 ########################################
1088 def gpgv_get_status_output(cmd, status_read, status_write):
1090 Our very own version of commands.getouputstatus(), hacked to support
1094 cmd = ['/bin/sh', '-c', cmd]
1095 p2cread, p2cwrite = os.pipe()
1096 c2pread, c2pwrite = os.pipe()
1097 errout, errin = os.pipe()
1107 for i in range(3, 256):
1108 if i != status_write:
1114 os.execvp(cmd[0], cmd)
1120 os.dup2(c2pread, c2pwrite)
1121 os.dup2(errout, errin)
1123 output = status = ""
1125 i, o, e = select.select([c2pwrite, errin, status_read], [], [])
1128 r = os.read(fd, 8196)
1130 more_data.append(fd)
1131 if fd == c2pwrite or fd == errin:
1133 elif fd == status_read:
1136 fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
1138 pid, exit_status = os.waitpid(pid, 0)
1140 os.close(status_write)
1141 os.close(status_read)
1151 return output, status, exit_status
1153 ################################################################################
def process_gpgv_output(status):
    """
    Parse gpgv --status-fd output into a keyword dictionary.

    @param status: raw status-fd text emitted by gpgv
    @return: tuple (keywords, internal_error); keywords maps each status
        token (e.g. "GOODSIG") to its argument list, internal_error is a
        string describing malformed input ("" when parsing succeeded)
    """
    # Process the status-fd output
    keywords = {}
    internal_error = ""
    for line in status.split('\n'):
        line = line.strip()
        if line == "":
            continue
        split = line.split()
        if len(split) < 2:
            internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
            continue
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
            continue
        args = split[2:]
        # Duplicate tokens are an error except for the few gpg emits twice.
        # ('in' instead of the deprecated dict.has_key())
        if keyword in keywords and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
            continue
        else:
            keywords[keyword] = args
    return (keywords, internal_error)
1180 ################################################################################
1182 def retrieve_key (filename, keyserver=None, keyring=None):
1184 Retrieve the key that signed 'filename' from 'keyserver' and
1185 add it to 'keyring'. Returns nothing on success, or an error message
1189 # Defaults for keyserver and keyring
1191 keyserver = Cnf["Dinstall::KeyServer"]
1193 keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
1195 # Ensure the filename contains no shell meta-characters or other badness
1196 if not re_taint_free.match(filename):
1197 return "%s: tainted filename" % (filename)
1199 # Invoke gpgv on the file
1200 status_read, status_write = os.pipe()
1201 cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1202 (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1204 # Process the status-fd output
1205 (keywords, internal_error) = process_gpgv_output(status)
1207 return internal_error
1209 if not keywords.has_key("NO_PUBKEY"):
1210 return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1212 fingerprint = keywords["NO_PUBKEY"][0]
1213 # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
1214 # it'll try to create a lockfile in /dev. A better solution might
1215 # be a tempfile or something.
1216 cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1217 % (Cnf["Dinstall::SigningKeyring"])
1218 cmd += " --keyring %s --keyserver %s --recv-key %s" \
1219 % (keyring, keyserver, fingerprint)
1220 (result, output) = commands.getstatusoutput(cmd)
1222 return "'%s' failed with exit code %s" % (cmd, result)
1226 ################################################################################
def gpg_keyring_args(keyrings=None):
    """
    Build the --keyring option string for invoking gpg/gpgv.

    @param keyrings: list of keyring paths; when not given, falls back to
        the configured Dinstall::GPGKeyring list
    @return: space-separated "--keyring <path>" options
    """
    if not keyrings:
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
    return " ".join(["--keyring %s" % x for x in keyrings])
1234 ################################################################################
def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None):
    """
    Check the signature of a file.

    @type sig_filename: string
    @param sig_filename: the name of the signed file (or of the detached
                         signature) whose signature should be checked.

    @type data_filename: string
    @param data_filename: optional name of the file a detached signature
                          applies to.

    @type keyrings: list (or None)
    @param keyrings: optional list of keyrings to use; defaults to the
                     keyrings configured under Dinstall::GPGKeyring.

    @type autofetch: None, True or False
    @param autofetch: whether to fetch unknown signing keys from a
                      keyserver first.  None means use the behaviour
                      configured in Dinstall::KeyAutoFetch.

    @rtype: tuple
    @return: (fingerprint, []) when the signature is valid, or
             (None, [error message, ...]) when it is not.
    """

    rejects = []

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(sig_filename):
        rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
        return (None, rejects)

    if data_filename and not re_taint_free.match(data_filename):
        rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
        return (None, rejects)

    if not keyrings:
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    # Autofetch the signing key if that's enabled
    if autofetch == None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    if autofetch:
        error_msg = retrieve_key(sig_filename)
        if error_msg:
            rejects.append(error_msg)
            return (None, rejects)

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    if internal_error:
        rejects.append("internal error while performing signature check on %s." % (sig_filename))
        # BUG FIX: list.append() takes exactly one argument; the old
        # rejects.append(msg, "") calls (left over from the reject()
        # callback API this docstring used to describe) raised TypeError.
        rejects.append(internal_error)
        rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.")
        return (None, rejects)

    # Now check for obviously bad things in the processed output
    # (membership tests instead of has_key: identical on py2, py3-safe).
    if "KEYREVOKED" in keywords:
        rejects.append("The key used to sign %s has been revoked." % (sig_filename))
    if "BADSIG" in keywords:
        rejects.append("bad signature on %s." % (sig_filename))
    if "ERRSIG" in keywords and "NO_PUBKEY" not in keywords:
        rejects.append("failed to check signature on %s." % (sig_filename))
    if "NO_PUBKEY" in keywords:
        args = keywords["NO_PUBKEY"]
        key = "UNKNOWN"
        if len(args) >= 1:
            key = args[0]
        rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if "BADARMOR" in keywords:
        rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if "NODATA" in keywords:
        rejects.append("no signature found in %s." % (sig_filename))
    if "EXPKEYSIG" in keywords:
        args = keywords["EXPKEYSIG"]
        key = "UNKNOWN"
        if len(args) >= 1:
            key = args[0]
        rejects.append("Signature made by expired key 0x%s" % (key))
    if "KEYEXPIRED" in keywords and "GOODSIG" not in keywords:
        args = keywords["KEYEXPIRED"]
        expiredate = ""
        if len(args) >= 1:
            timestamp = args[0]
            # gpgv emits either an epoch timestamp or an ISO-8601 date
            # (containing a 'T'); only the former needs converting.
            if timestamp.count("T") == 0:
                try:
                    expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
                except ValueError:
                    expiredate = "unknown (%s)" % (timestamp)
            else:
                expiredate = timestamp
        rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))

    if len(rejects) > 0:
        return (None, rejects)

    # Next check gpgv exited with a zero return code
    if exit_status:
        rejects.append("gpgv failed while checking %s." % (sig_filename))
        # Prefer the structured status-fd output; fall back to stdout.
        if status.strip():
            rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "))
        else:
            rejects.append(prefix_multi_line_string(output, " [GPG output:] "))
        return (None, rejects)

    # Sanity check the good stuff we expect
    if "VALIDSIG" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
    else:
        args = keywords["VALIDSIG"]
        if len(args) < 1:
            rejects.append("internal error while checking signature on %s." % (sig_filename))
        else:
            fingerprint = args[0]
    if "GOODSIG" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if "SIG_ID" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

    # Finally ensure there's not something we don't recognise
    known_keywords = ("VALIDSIG", "SIG_ID", "GOODSIG", "BADSIG", "ERRSIG",
                      "SIGEXPIRED", "KEYREVOKED", "NO_PUBKEY", "BADARMOR",
                      "NODATA", "NOTATION_DATA", "NOTATION_NAME", "KEYEXPIRED")
    for keyword in keywords.keys():
        if keyword not in known_keywords:
            rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))

    if len(rejects) > 0:
        return (None, rejects)

    return (fingerprint, [])
1368 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """Retrieve email addresses from gpg key uids for a given fingerprint."""
    # Results are memoised in the module-level key_uid_email_cache.
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses is not None:
        return addresses
    addresses = set()
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
           % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    if result == 0:
        for l in output.split('\n'):
            m = re_gpg_uid.match(l)
            if m:
                addresses.add(m.group(1))
    # Cache negative results too, so a missing key is only looked up once.
    key_uid_email_cache[fingerprint] = addresses
    return addresses
1387 ################################################################################
1389 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """
    Word-wrap 'paragraph' to at most 'max_length' characters per line,
    prefixing every continuation line with 'prefix'.  Words longer than
    'max_length' are emitted on a line of their own rather than split.

    @rtype: string
    @return: the wrapped text (no trailing newline is appended).
    """
    line = ""
    s = ""
    have_started = 0
    words = paragraph.split()

    for word in words:
        word_size = len(word)
        if word_size > max_length:
            # Oversized word: flush the current line, then put the word on
            # its own line -- we never break inside a word.
            if have_started:
                s += line + '\n' + prefix
            s += word + '\n' + prefix
            line = ""
            have_started = 0
        else:
            if have_started:
                new_length = len(line) + word_size + 1
                if new_length > max_length:
                    # Adding this word would overflow: flush and start over.
                    s += line + '\n' + prefix
                    line = word
                else:
                    line += ' ' + word
            else:
                line = word
                have_started = 1

    if have_started:
        s += line

    return s
1420 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
    Returns fixed 'src'
    """
    # Strip the root from both endpoints (first occurrence only).
    stripped_src = src.replace(root, '', 1)
    target_dir = os.path.dirname(dest.replace(root, '', 1))
    # One "../" per path component of the link's directory.
    depth = len(target_dir.split('/'))
    return ('../' * depth) + stripped_src
1433 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.
    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pair (fd, name).
    """
    # Delegate entirely to tempfile, which handles the secure creation.
    (fd, pathname) = tempfile.mkstemp(suffix, prefix, directory)
    return (fd, pathname)
1447 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Return a secure and unique directory by pre-creating it.
    If 'parent' is non-null, it will be the directory the directory is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pathname to the new directory
    """
    # tempfile does the secure pre-creation for us.
    created = tempfile.mkdtemp(suffix, prefix, parent)
    return created
1461 ################################################################################
def is_email_alias(email):
    """ checks if the user part of the email is listed in the alias file """
    global alias_cache
    if alias_cache is None:
        # First call: build the cache from the alias file (one "user: ..."
        # entry per line); an empty cache is kept if no alias file exists.
        aliasfn = which_alias_file()
        alias_cache = set()
        if aliasfn:
            # BUG FIX: close the alias file once read instead of leaking
            # the handle until interpreter shutdown.
            f = open(aliasfn)
            try:
                for l in f:
                    alias_cache.add(l.split(':')[0])
            finally:
                f.close()
    uid = email.split('@')[0]
    return uid in alias_cache
1475 ################################################################################
def get_changes_files(dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place)

    Returns a list of filenames
    """
    try:
        # Much of the rest of p-u/p-a depends on being in the right place
        os.chdir(dir)
        changes_files = [x for x in os.listdir(dir) if x.endswith('.changes')]
    except OSError as e:
        # fubar() prints the message and exits -- the return below is only
        # reached on success.
        fubar("Failed to read list from directory %s (%s)" % (dir, e))

    return changes_files
1494 ################################################################################
# Module initialisation (runs at import time): build the global apt_pkg
# configuration object from the default dak config file, then overlay a
# host-specific config if which_conf_file() points somewhere else.
Cnf = apt_pkg.newConfiguration()
apt_pkg.ReadConfigFileISC(Cnf,default_config)

# NOTE(review): which_conf_file() is defined elsewhere in this file;
# presumably it consults the environment for an alternate config path.
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1504 ###############################################################################