2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
40 import email as modemail
43 from dbconn import DBConn, get_architecture, get_component, get_suite
44 from dak_exceptions import *
45 from textutils import fix_maintainer
46 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
47 re_multi_line_field, re_srchasver, re_taint_free, \
48 re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource
50 from srcformats import srcformats
51 from collections import defaultdict
53 ################################################################################
# Module-level configuration paths and caches shared by the helpers below.
default_config = "/etc/dak/dak.conf"     #: default dak config, defines host properties
default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used

alias_cache = None        #: Cache for email alias checks
key_uid_email_cache = {}  #: Cache for email addresses from gpg key uids

# (hashname, function, earliest_changes_version)
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
                ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
# Monkeypatch commands.getstatusoutput as it returns a "0" exit code in
# all situations under lenny's Python.
def dak_getstatusoutput(cmd):
    # Replacement for commands.getstatusoutput(): run 'cmd' through the
    # shell with stderr merged into stdout and collect the combined output.
    # NOTE(review): intervening lines of the original (exit-status handling
    # and the return statement) are elided in this listing.
    pipe = subprocess.Popen(cmd, shell=True, universal_newlines=True,
        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

    output = "".join(pipe.stdout.readlines())

# Install the replacement so all existing callers of
# commands.getstatusoutput() pick it up transparently.
commands.getstatusoutput = dak_getstatusoutput
81 ################################################################################
84 """ Escape html chars """
85 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
87 ################################################################################
def open_file(filename, mode='r'):
    """
    Open C{file}, return fileobject.

    @type filename: string
    @param filename: path/filename to open

    @param mode: open mode

    @return: open fileobject

    @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
    """
    # NOTE(review): the try/except IOError wrapper and the final return are
    # elided in this listing; the raise below belongs to the except branch.
    f = open(filename, mode)
    raise CantOpenError, filename
111 ################################################################################
def our_raw_input(prompt=""):
    # raw_input() wrapper: write the prompt ourselves, then read a line.
    # NOTE(review): the raw_input() call and its EOF/interrupt handling are
    # elided in this listing; the stderr write below belongs to that handler.
    sys.stdout.write(prompt)
    sys.stderr.write("\nUser interrupt (^D).\n")
124 ################################################################################
def extract_component_from_section(section):
    # Split a section like "contrib/net" into its component prefix and
    # return (section, component).
    # NOTE(review): several lines (the default-component fallback bodies)
    # are elided in this listing.
    if section.find('/') != -1:
        component = section.split('/')[0]

    # Expand default component
    if Cnf.has_key("Component::%s" % section):
    return (section, component)
141 ################################################################################
def parse_deb822(contents, signing_rules=0):
    # Parse deb822-style control data (.changes/.dsc contents) into a dict of
    # lower-cased field names; also stores the raw text as "filecontents".
    # NOTE(review): many intervening lines (index bookkeeping, loop increments,
    # continue/break statements) are elided in this listing.
    # Split the lines in the input, keeping the linebreaks.
    lines = contents.splitlines(True)
    raise ParseChangesError, "[Empty changes file]"

    # Reindex by line number so we can easily verify the format of
    indexed_lines[index] = line[:-1]

    num_of_lines = len(indexed_lines.keys())
    while index < num_of_lines:
        line = indexed_lines[index]
        if signing_rules == 1:
            # Strict .dsc mode: a blank line must be immediately followed by
            # the PGP signature block, otherwise the file is malformed.
            if index > num_of_lines:
                raise InvalidDscError, index
            line = indexed_lines[index]
            if not line.startswith("-----BEGIN PGP SIGNATURE"):
                raise InvalidDscError, index
        if line.startswith("-----BEGIN PGP SIGNATURE"):
        if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
            if signing_rules == 1:
                # Skip the PGP header block up to the first blank line.
                while index < num_of_lines and line != "":
                    line = indexed_lines[index]
        # If we're not inside the signed data, don't process anything
        if signing_rules >= 0 and not inside_signature:
        slf = re_single_line_field.match(line)
        field = slf.groups()[0].lower()
        changes[field] = slf.groups()[1]
        changes[field] += '\n'
        mlf = re_multi_line_field.match(line)
        raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
        if first == 1 and changes[field] != "":
            changes[field] += '\n'
        changes[field] += mlf.groups()[0] + '\n'

    if signing_rules == 1 and inside_signature:
        raise InvalidDscError, index

    changes["filecontents"] = "".join(lines)

    if changes.has_key("source"):
        # Strip the source version in brackets from the source field,
        # put it in the "source-version" field instead.
        srcver = re_srchasver.search(changes["source"])
        changes["source"] = srcver.group(1)
        changes["source-version"] = srcver.group(2)

    raise ParseChangesError, error
231 ################################################################################
def parse_changes(filename, signing_rules=0):
    """
    Parses a changes file and returns a dictionary where each field is a
    key.  The mandatory first argument is the filename of the .changes

    signing_rules is an optional argument:

      - If signing_rules == -1, no signature is required.
      - If signing_rules == 0 (the default), a signature is required.
      - If signing_rules == 1, it turns on the same strict format checking

    The rules for (signing_rules == 1)-mode are:

      - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
        followed by any PGP header data and must end with a blank line.

      - The data section must end with a blank line and must be followed by
        "-----BEGIN PGP SIGNATURE-----".
    """
    changes_in = open_file(filename)
    content = changes_in.read()
    # Validate the content is proper UTF-8 before parsing.
    # NOTE(review): the try/except UnicodeError wrapper is elided in this
    # listing; the raise below belongs to its except branch.
    unicode(content, 'utf-8')
    raise ChangesUnicodeError, "Changes file not proper utf-8"
    return parse_deb822(content, signing_rules)
264 ################################################################################
def hash_key(hashname):
    """Return the files-dict key under which the 'hashname' digest is stored."""
    return hashname + 'sum'
269 ################################################################################
def create_hash(where, files, hashname, hashfunc):
    """
    create_hash extends the passed files dict with the given hash by
    iterating over all files on disk and passing them to the hashing
    """
    # NOTE(review): the rejmsg initialisation, try statement, continue and
    # return are elided in this listing.
    for f in files.keys():
        file_handle = open_file(f)
    except CantOpenError:
        rejmsg.append("Could not open file %s for checksumming" % (f))
        # Store the computed digest under e.g. "sha1sum" in the file's entry.
        files[f][hash_key(hashname)] = hashfunc(file_handle)
291 ################################################################################
def check_hash(where, files, hashname, hashfunc):
    """
    check_hash checks the given hash in the files dict against the actual
    files on disk.  The hash values need to be present consistently in
    all file entries.  It does not modify its input in any way.
    """
    # NOTE(review): the rejmsg initialisation, try statement, file close and
    # return are elided in this listing.
    for f in files.keys():
        file_handle = open_file(f)

        # Check for the hash entry, to not trigger a KeyError.
        if not files[f].has_key(hash_key(hashname)):
            rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,

        # Actually check the hash for correctness.
        if hashfunc(file_handle) != files[f][hash_key(hashname)]:
            rejmsg.append("%s: %s check failed in %s" % (f, hashname,
    except CantOpenError:
        # TODO: This happens when the file is in the pool.
        # warn("Cannot open file %s" % f)
326 ################################################################################
def check_size(where, files):
    """
    check_size checks the file sizes in the passed files dict against the
    """
    # NOTE(review): the rejmsg initialisation, os.stat try/except and return
    # are elided in this listing; 'entry' is the stat result.
    for f in files.keys():
        # TODO: This happens when the file is in the pool.
        actual_size = entry[stat.ST_SIZE]
        size = int(files[f]["size"])
        if size != actual_size:
            rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
                          % (f, actual_size, size, where))
351 ################################################################################
def check_dsc_files(dsc_filename, dsc=None, dsc_files=None):
    """
    Verify that the files listed in the Files field of the .dsc are
    those expected given the announced Format.

    @type dsc_filename: string
    @param dsc_filename: path of .dsc file

    @param dsc: the content of the .dsc parsed by C{parse_changes()}

    @type dsc_files: dict
    @param dsc_files: the file list returned by C{build_file_list()}

    @return: all errors detected
    """
    # NOTE(review): several lines (rejmsg initialisation, the 'if dsc is
    # None' guard, the ftype_lookup assignment header and the return) are
    # elided in this listing.
    # Parse the file if needed
    dsc = parse_changes(dsc_filename, signing_rules=1);

    if dsc_files is None:
        dsc_files = build_file_list(dsc, is_a_dsc=1)

    # Ensure .dsc lists proper set of source files according to the format
    # announced
    has = defaultdict(lambda: 0)

    # Ordered lookup table mapping filename-suffix regexes to the 'has' keys
    # they set (earlier, more specific patterns first).
    (r'orig.tar.gz', ('orig_tar_gz', 'orig_tar')),
    (r'diff.gz', ('debian_diff',)),
    (r'tar.gz', ('native_tar_gz', 'native_tar')),
    (r'debian\.tar\.(gz|bz2)', ('debian_tar',)),
    (r'orig\.tar\.(gz|bz2)', ('orig_tar',)),
    (r'tar\.(gz|bz2)', ('native_tar',)),
    (r'orig-.+\.tar\.(gz|bz2)', ('more_orig_tar',)),

    for f in dsc_files.keys():
        m = re_issource.match(f)
        rejmsg.append("%s: %s in Files field not recognised as source."

        # Populate 'has' dictionary by resolving keys in lookup table
        for regex, keys in ftype_lookup:
            if re.match(regex, m.group(3)):

        # File does not match anything in lookup table; reject
        # NOTE(review): 'reject' is not defined anywhere visible in this
        # module; the sibling branches use rejmsg.append(...) — confirm this
        # is not a latent NameError.
        reject("%s: unexpected source file '%s'" % (dsc_filename, f))

    # Check for multiple files
    for file_type in ('orig_tar', 'native_tar', 'debian_tar', 'debian_diff'):
        if has[file_type] > 1:
            rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))

    # Source format specific tests
    for format in srcformats:
        if format.re_format.match(dsc['format']):
            '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)
428 ################################################################################
def check_hash_fields(what, manifest):
    """
    check_hash_fields ensures that there are no checksum fields in the
    given dict that we do not know about.
    """
    # NOTE(review): the rejmsg initialisation and return are elided in this
    # listing.
    # Names of the hashes we accept (first element of each known_hashes entry).
    hashes = map(lambda x: x[0], known_hashes)
    for field in manifest:
        if field.startswith("checksums-"):
            hashname = field.split("-",1)[1]
            if hashname not in hashes:
                rejmsg.append("Unsupported checksum field for %s "\
                    "in %s" % (hashname, what))
446 ################################################################################
def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
    # For .changes of Format >= 'version': import the announced checksums;
    # otherwise compute and verify the hash ourselves via create/check_hash.
    # NOTE(review): the branch selecting 'func' and the early return are
    # elided in this listing.
    if format >= version:
        # The version should contain the specified hash.
        # Import hashes from the changes
        rejmsg = parse_checksums(".changes", files, changes, hashname)
    # We need to calculate the hash because it can't possibly
    return func(".changes", files, hashname, hashfunc)
# We could add the orig which might be in the pool to the files dict to
# access the checksums easily.

def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
    """
    ensure_dsc_hashes' task is to ensure that each and every *present* hash
    in the dsc is correct, i.e. identical to the changes file and if necessary
    the pool.  The latter task is delegated to check_hash.
    """
    # NOTE(review): the rejmsg initialisation, the early return for a missing
    # Checksums field and the final return are elided in this listing.
    if not dsc.has_key('Checksums-%s' % (hashname,)):
    # Import hashes from the dsc
    parse_checksums(".dsc", dsc_files, dsc, hashname)
    rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
482 ################################################################################
def parse_checksums(where, files, manifest, hashname):
    # Import the Checksums-<hashname> entries of 'manifest' into 'files',
    # cross-checking sizes, and complain about files lacking an entry.
    # NOTE(review): the rejmsg initialisation, the try/except around the
    # 3-way split, several continue statements and the return are elided in
    # this listing.
    field = 'checksums-%s' % hashname
    if not field in manifest:
    for line in manifest[field].split('\n'):
        clist = line.strip().split(' ')
        checksum, size, checkfile = clist
        rejmsg.append("Cannot parse checksum line [%s]" % (line))
        if not files.has_key(checkfile):
            # TODO: check for the file's entry in the original files dict, not
            # the one modified by (auto)byhand and other weird stuff
            #             rejmsg.append("%s: not present in files but in checksums-%s in %s" %
            #                 (file, hashname, where))
        # NOTE(review): 'size' here is the string from split() — confirm the
        # files[...]["size"] entries are also strings, else this comparison
        # can never be equal.
        if not files[checkfile]["size"] == size:
            rejmsg.append("%s: size differs for files and checksums-%s entry "\
                "in %s" % (checkfile, hashname, where))
        files[checkfile][hash_key(hashname)] = checksum
    for f in files.keys():
        if not files[f].has_key(hash_key(hashname)):
            # NOTE(review): this message interpolates 'checkfile' (left over
            # from the previous loop) where 'f' looks intended — confirm.
            rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
515 ################################################################################
# Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl

def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
    # Parse the given Files-style field of 'changes' into a dict keyed by
    # filename, each entry carrying size/section/priority/component and the
    # given hash.
    # NOTE(review): the files initialisation, the per-line split and length
    # dispatch, several continue statements and the return are elided in
    # this listing.
    # Make sure we have a Files: field to parse...
    if not changes.has_key(field):
        raise NoFilesFieldError

    # NOTE(review): 'changes.get["format"]' subscripts the bound method and
    # would raise TypeError at runtime — changes.get("format") looks
    # intended.  The hard-coded is_a_dsc=False / field='files' arguments
    # also ignore this function's own parameters — confirm against upstream.
    format = SourceFormat.parse_format(changes.get["format"])
    SourceFormat.validate_format(format, is_a_dsc=False, field='files')

    includes_section = (not is_a_dsc) and field == "files"

    # Parse each entry/line:
    for i in changes[field].split('\n'):
        section = priority = ""
        (md5, size, section, priority, name) = s
        (md5, size, name) = s
        raise ParseChangesError, i

        (section, component) = extract_component_from_section(section)

        files[name] = Dict(size=size, section=section,
                           priority=priority, component=component)
        files[name][hashname] = md5
558 ################################################################################
def send_mail (message, filename=""):
    """sendmail wrapper, takes _either_ a message string or a file as arguments"""
    # NOTE(review): many intervening lines (guards, loop headers, close()
    # calls, break/continue statements) are elided in this listing.

    # If we've been passed a string dump it into a temporary file
    (fd, filename) = tempfile.mkstemp()
    os.write (fd, message)

    # Optionally filter recipients through the whitelist file.
    if Cnf.has_key("Dinstall::MailWhiteList") and \
       Cnf["Dinstall::MailWhiteList"] != "":
        message_in = open_file(filename)
        message_raw = modemail.message_from_file(message_in)

        whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
        for line in whitelist_in:
            if not re_whitespace_comment.match(line):
                if re_re_mark.match(line):
                    # Lines marked as regexes are compiled as-is...
                    whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
                # ...everything else is matched literally.
                whitelist.append(re.compile(re.escape(line.strip())))

        # Walk the recipient headers, keeping only whitelisted addresses.
        fields = ["To", "Bcc", "Cc"]
        value = message_raw.get(field, None)
        for item in value.split(","):
            (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
        if not mail_whitelisted:
            print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])

        # Doesn't have any mail in whitelist so remove the header
        del message_raw[field]
        message_raw.replace_header(field, string.join(match, ", "))

        # Change message fields in order if we don't have a To header
        if not message_raw.has_key("To"):
            if message_raw.has_key(field):
                message_raw[fields[-1]] = message_raw[field]
                del message_raw[field]

        # Clean up any temporary files
        # and return, as we removed all recipients.
        os.unlink (filename);

        # Rewrite the temp file with the filtered message.
        # NOTE(review): os.open after unlink without os.O_CREAT cannot
        # recreate the file — confirm O_CREAT is present upstream.
        fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
        os.write (fd, message_raw.as_string(True));

    # Invoke sendmail on the (possibly rewritten) file.
    (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
    raise SendmailFailedError, output

    # Clean up any temporary files
def poolify (source, component):
    # Return the pool subdirectory for 'source' within 'component':
    # "lib*" packages pool under their first four characters, everything
    # else under the first character.
    # NOTE(review): lines normalising 'component' (trailing slash) are
    # elided in this listing.
    if source[:3] == "lib":
        return component + source[:4] + '/' + source + '/'
    return component + source[:1] + '/' + source + '/'
650 ################################################################################
def move (src, dest, overwrite = 0, perms = 0664):
    # Move 'src' to 'dest' (copy2 + chmod; the unlink of src and the umask
    # restore are elided in this listing), creating the destination
    # directory group-writable if needed.
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Moving %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
        fubar("Can't move %s to %s - file already exists." % (src, dest))
        if not os.access(dest, os.W_OK):
            fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
def copy (src, dest, overwrite = 0, perms = 0664):
    # Like move() but leaves 'src' in place and raises exceptions
    # (FileExistsError / CantOverwriteError) instead of calling fubar().
    # NOTE(review): the overwrite guard structure and umask restore are
    # elided in this listing.
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Copying %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
        raise FileExistsError
        if not os.access(dest, os.W_OK):
            raise CantOverwriteError
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
697 ################################################################################
    # Body of where_am_i() — its def line is not visible in this listing.
    # Resolve our own hostname and prefer a per-host DatabaseHostname from
    # the config, if one is set.
    res = socket.gethostbyaddr(socket.gethostname())
    database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
    if database_hostname:
        return database_hostname
def which_conf_file ():
    # Pick the dak config file for this host: a per-user local config if
    # allowed, a per-host DakConfig entry, or the module default.
    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # NOTE(review): os.path.join discards 'homedir' because the second
        # argument is absolute — this always yields "/etc/dak.conf".
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            # NOTE(review): this reads default_config rather than the
            # confpath just located — confirm which is intended.
            apt_pkg.ReadConfigFileISC(Cnf,default_config)

    # We are still in here, so there is no local config file or we do
    # not allow local files. Do the normal stuff.
    if Cnf.get("Config::" + res[0] + "::DakConfig"):
        return Cnf["Config::" + res[0] + "::DakConfig"]

    return default_config
def which_apt_conf_file ():
    # Same host-based lookup as which_conf_file(), but for the apt config.
    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # NOTE(review): os.path.join with an absolute second argument
        # discards 'homedir', and the read below uses default_config rather
        # than confpath — confirm both against upstream.
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            apt_pkg.ReadConfigFileISC(Cnf,default_config)

    if Cnf.get("Config::" + res[0] + "::AptConfig"):
        return Cnf["Config::" + res[0] + "::AptConfig"]

    return default_apt_config
def which_alias_file():
    # Locate this host's forward-alias file, if present.
    # NOTE(review): the return statements (aliasfn / fallback) are elided in
    # this listing.
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
    if os.path.exists(aliasfn):
745 ################################################################################
def TemplateSubst(map, filename):
    """ Perform a substition of template """
    # Replace every key of 'map' occurring in the template file with its
    # str()-ed value.
    # NOTE(review): the loop header over map's keys, the file close and the
    # return are elided in this listing.
    templatefile = open_file(filename)
    template = templatefile.read()
    template = template.replace(x, str(map[x]))
756 ################################################################################
def fubar(msg, exit_code=1):
    # Print a fatal error to stderr; the sys.exit(exit_code) call is elided
    # in this listing.
    sys.stderr.write("E: %s\n" % (msg))

    # Body of warn(msg) — its def line is not visible in this listing.
    sys.stderr.write("W: %s\n" % (msg))
765 ################################################################################
# Returns the user name with a laughable attempt at rfc822 conformancy
# (read: removing stray periods).
    # Body of whoami() — its def line is not visible in this listing.
    # GECOS full name with commas/periods stripped.
    return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')

    # Body of username() — its def line is not visible in this listing.
    # The plain login name from the password database.
    return pwd.getpwuid(os.getuid())[0]
775 ################################################################################
785 return ("%d%s" % (c, t))
787 ################################################################################
def cc_fix_changes (changes):
    # Canonicalise the "architecture" field in place: replace the string
    # with a dict mapping each architecture name to 1.
    # NOTE(review): the split of 'o' into individual names (binding 'j') is
    # elided in this listing.
    o = changes.get("architecture", "")
    del changes["architecture"]
    changes["architecture"] = {}
    changes["architecture"][j] = 1
def changes_compare (a, b):
    """ Sort by source name, source version, 'have source', and then by filename """
    # cmp()-style comparator over two .changes filenames.
    # NOTE(review): the try/except wrappers around parse_changes, the early
    # 'if q:' returns after each criterion and the final filename cmp are
    # elided in this listing.
    a_changes = parse_changes(a)
    b_changes = parse_changes(b)

    cc_fix_changes (a_changes)
    cc_fix_changes (b_changes)

    # Sort by source name
    a_source = a_changes.get("source")
    b_source = b_changes.get("source")
    q = cmp (a_source, b_source)

    # Sort by source version
    a_version = a_changes.get("version", "0")
    b_version = b_changes.get("version", "0")
    q = apt_pkg.VersionCompare(a_version, b_version)

    # Sort by 'have source'
    a_has_source = a_changes["architecture"].get("source")
    b_has_source = b_changes["architecture"].get("source")
    if a_has_source and not b_has_source:
    elif b_has_source and not a_has_source:

    # Fall back to sort by filename
837 ################################################################################
def find_next_free (dest, too_many=100):
    # Find a non-existing filename by appending '.<n>' suffixes, giving up
    # after 'too_many' attempts.
    # NOTE(review): the 'extra'/'orig_dest' initialisation, the increment and
    # the final return are elided in this listing.
    while os.path.exists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
    if extra >= too_many:
        raise NoFreeFilenameError
849 ################################################################################
def result_join (original, sep = '\t'):
    """
    Join the elements of 'original' with 'sep', substituting the empty
    string for None entries (e.g. for tab-separated query output where a
    NULL column must stay an empty field rather than the string "None").

    @param original: sequence of strings and/or None
    @param sep: separator string (defaults to a tab)
    @return: the joined string
    """
    resultlist = []
    for item in original:
        # Map None markers to empty columns; identity check is the correct
        # test for None (the old '== None' comparison was an anti-idiom).
        if item is None:
            resultlist.append("")
        else:
            resultlist.append(item)
    return sep.join(resultlist)
860 ################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    # Prepend 'prefix' to each line of 'str'; blank lines are dropped unless
    # include_blank_lines is set.
    # NOTE(review): the 'out' initialisation, any per-line normalisation, the
    # trailing-newline strip and the return are elided in this listing.
    for line in str.split('\n'):
        if line or include_blank_lines:
            out += "%s%s\n" % (prefix, line)
    # Strip trailing new line
def validate_changes_file_arg(filename, require_changes=1):
    """
    'filename' is either a .changes or .dak file.  If 'filename' is a
    .dak file, it's changed to be the corresponding .changes file.  The
    function then checks if the .changes file a) exists and b) is
    readable and returns the .changes filename if so.  If there's a
    problem, the next action depends on the option 'require_changes'

      - If 'require_changes' == -1, errors are ignored and the .changes
        filename is returned.
      - If 'require_changes' == 0, a warning is given and 'None' is returned.
      - If 'require_changes' == 1, a fatal error is raised.
    """
    # NOTE(review): the error initialisation, the success return and the
    # per-mode return values are elided in this listing.
    orig_filename = filename
    if filename.endswith(".dak"):
        filename = filename[:-4]+".changes"

    if not filename.endswith(".changes"):
        error = "invalid file type; not a changes file"
    if not os.access(filename,os.R_OK):
        if os.path.exists(filename):
            error = "permission denied"
        error = "file not found"
    if require_changes == 1:
        fubar("%s: %s." % (orig_filename, error))
    elif require_changes == 0:
        warn("Skipping %s - %s" % (orig_filename, error))
    else: # We only care about the .dak file
916 ################################################################################
919 return (arch != "source" and arch != "all")
921 ################################################################################
def join_with_commas_and(list):
    """Render a list as English prose: 'a, b and c'; 'nothing' when empty."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    return "%s and %s" % (", ".join(list[:-1]), list[-1])
928 ################################################################################
    # Body of pp_deps(deps) — its def line and the pp_deps list
    # initialisation are not visible in this listing.  Pretty-prints a list
    # of (package, version, constraint) atoms as "pkg (op version) | ...".
    (pkg, version, constraint) = atom
    pp_dep = "%s (%s %s)" % (pkg, constraint, version)
    pp_deps.append(pp_dep)
    return " |".join(pp_deps)
941 ################################################################################
946 ################################################################################
def parse_args(Options):
    """ Handle -a, -c and -s arguments; returns them as SQL constraints """
    # XXX: This should go away and everything which calls it be converted
    #      to use SQLA properly.  For now, we'll just fix it not to use
    #      the old Pg interface though
    # NOTE(review): the initialisations of con_suites/con_components,
    # suite_ids_list/arch_ids_list, several if/else headers and continue
    # statements are elided in this listing.
    session = DBConn().session()

    for suitename in split_args(Options["Suite"]):
        suite = get_suite(suitename, session=session)
        # NOTE(review): if get_suite() can return None for an unknown suite,
        # this attribute access raises AttributeError before the warn() —
        # confirm against the dbconn API.
        if suite.suite_id is None:
            warn("suite '%s' not recognised." % (suite.suite_name))
        suite_ids_list.append(suite.suite_id)
        con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
        fubar("No valid suite given.")

    if Options["Component"]:
        component_ids_list = []
        for componentname in split_args(Options["Component"]):
            component = get_component(componentname, session=session)
            if component is None:
                warn("component '%s' not recognised." % (componentname))
            component_ids_list.append(component.component_id)
        if component_ids_list:
            con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
            fubar("No valid component given.")

    # Process architecture
    con_architectures = ""
    if Options["Architecture"]:
        for archname in split_args(Options["Architecture"]):
            if archname == "source":
            arch = get_architecture(archname, session=session)
            warn("architecture '%s' not recognised." % (archname))
            arch_ids_list.append(arch.arch_id)
            con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
        if not check_source:
            fubar("No valid architecture given.")

    return (con_suites, con_architectures, con_components, check_source)
1010 ################################################################################
# Inspired(tm) by Bryn Keller's print_exc_plus (See
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)

    # Body of print_exc() — its def line, the frame-walking loop headers and
    # the per-frame try/except are not visible in this listing.  Dumps the
    # traceback plus each frame's local variables to stdout.
    tb = sys.exc_info()[2]
    frame = frame.f_back
    traceback.print_exc()
    print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
                                           frame.f_code.co_filename,
    for key, value in frame.f_locals.items():
        print "\t%20s = " % key,
        print "<unable to print>"
1037 ################################################################################
def try_with_debug(function):
    # NOTE(review): the entire body of this function is not visible in this
    # listing.
1047 ################################################################################
def arch_compare_sw (a, b):
    """
    Function for use in sorting lists of architectures.

    Sorts normally except that 'source' dominates all others.
    """
    # NOTE(review): the remaining comparison branches and returns are elided
    # in this listing.
    if a == "source" and b == "source":
def split_args (s, dwim=1):
    """
    Split command line arguments which can be separated by either commas
    or whitespace.  If dwim is set, it will complain about string ending
    in comma since this usually means someone did 'dak ls -a i386, m68k
    foo' or something and the inevitable confusion resulting from 'm68k'
    being treated as an argument is undesirable.
    """
    # NOTE(review): the whitespace-split and comma-split return statements
    # are elided in this listing.
    if s.find(",") == -1:
        if s[-1:] == "," and dwim:
            fubar("split_args: found trailing comma, spurious space maybe?")
1083 ################################################################################
def Dict(**kwargs):
    """Legacy shorthand: build a dict from keyword arguments."""
    return kwargs
1087 ########################################
def gpgv_get_status_output(cmd, status_read, status_write):
    """
    Our very own version of commands.getouputstatus(), hacked to support
    """
    # Fork/exec 'cmd' via /bin/sh, keeping 'status_write' open in the child
    # so gpgv's --status-fd output can be read separately from stdout/stderr.
    # NOTE(review): the fork, fd close loops, select loop scaffolding and
    # final fd cleanup are elided in this listing.
    cmd = ['/bin/sh', '-c', cmd]
    p2cread, p2cwrite = os.pipe()
    c2pread, c2pwrite = os.pipe()
    errout, errin = os.pipe()
    # Child: close every inherited fd except the status pipe, then exec.
    for i in range(3, 256):
        if i != status_write:
    os.execvp(cmd[0], cmd)
    # Parent: wire up the read ends and multiplex with select().
    os.dup2(c2pread, c2pwrite)
    os.dup2(errout, errin)

    output = status = ""
    i, o, e = select.select([c2pwrite, errin, status_read], [], [])
    r = os.read(fd, 8196)
    more_data.append(fd)
    if fd == c2pwrite or fd == errin:
    elif fd == status_read:
        fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))

    pid, exit_status = os.waitpid(pid, 0)

    os.close(status_write)
    os.close(status_read)

    return output, status, exit_status
1154 ################################################################################
def process_gpgv_output(status):
    # Parse gpgv --status-fd output into ({keyword: args}, internal_error):
    # one dict entry per "[GNUPG:] KEYWORD args..." line, plus an accumulated
    # error string for malformed lines.
    # NOTE(review): the keywords/internal_error initialisations, blank-line
    # skip, continue statements and the 'args' slice are elided in this
    # listing.
    # Process the status-fd output
    for line in status.split('\n'):
        split = line.split()
        internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
        # These keywords may legitimately repeat; everything else repeating
        # indicates a parsing problem.
        if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
        keywords[keyword] = args

    return (keywords, internal_error)
1181 ################################################################################
def retrieve_key (filename, keyserver=None, keyring=None):
    """
    Retrieve the key that signed 'filename' from 'keyserver' and
    add it to 'keyring'.  Returns nothing on success, or an error message
    """
    # NOTE(review): the 'if keyserver/keyring is None' guards, the
    # internal_error check and the success/failure returns are elided in
    # this listing.

    # Defaults for keyserver and keyring
    keyserver = Cnf["Dinstall::KeyServer"]
    keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(filename):
        return "%s: tainted filename" % (filename)

    # Invoke gpgv on the file with an empty keyring, purely to learn which
    # key id signed it (expected to fail with NO_PUBKEY).
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
    (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)
    return internal_error

    if not keywords.has_key("NO_PUBKEY"):
        return "didn't find expected NO_PUBKEY in gpgv status-fd output"

    fingerprint = keywords["NO_PUBKEY"][0]
    # XXX - gpg sucks.  You can't use --secret-keyring=/dev/null as
    # it'll try to create a lockfile in /dev.  A better solution might
    # be a tempfile or something.
    cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
          % (Cnf["Dinstall::SigningKeyring"])
    cmd += " --keyring %s --keyserver %s --recv-key %s" \
           % (keyring, keyserver, fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    return "'%s' failed with exit code %s" % (cmd, result)
1227 ################################################################################
def gpg_keyring_args(keyrings=None):
    # Build a "--keyring X --keyring Y ..." argument string for gpg/gpgv,
    # defaulting to the configured keyrings.
    # NOTE(review): the 'if not keyrings:' guard line is elided in this
    # listing; as shown, the passed-in value is unconditionally overwritten.
    keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    return " ".join(["--keyring %s" % x for x in keyrings])
1235 ################################################################################
def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None):
    """
    Check the signature of a file and return the fingerprint if the
    signature is valid or 'None' if it's not.  The first argument is the
    filename whose signature should be checked.  The second argument is
    optional and is the name of the file a detached signature applies to.
    The third argument is optional and is a *list* of keyrings to use.
    'autofetch' can either be None, True or False.  If None, the default
    behaviour specified in the config will be used.

    @rtype: tuple
    @return: (fingerprint, []) if the signature is valid, otherwise
    (None, [list of reject messages])
    """

    rejects = []

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(sig_filename):
        rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
        return (None, rejects)

    if data_filename and not re_taint_free.match(data_filename):
        rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
        return (None, rejects)

    if not keyrings:
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    # Autofetch the signing key if that's enabled
    if autofetch is None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    if autofetch:
        error_msg = retrieve_key(sig_filename)
        if error_msg:
            rejects.append(error_msg)
            return (None, rejects)

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    if internal_error:
        rejects.append("internal error while performing signature check on %s." % (sig_filename))
        # BUGFIX: list.append() takes exactly one argument; the old
        # two-argument calls (a leftover from the removed reject() callback
        # interface) raised TypeError whenever this error path was taken.
        rejects.append(internal_error)
        rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.")
        return (None, rejects)

    # Now check for obviously bad things in the processed output
    if "KEYREVOKED" in keywords:
        rejects.append("The key used to sign %s has been revoked." % (sig_filename))
    if "BADSIG" in keywords:
        rejects.append("bad signature on %s." % (sig_filename))
    if "ERRSIG" in keywords and "NO_PUBKEY" not in keywords:
        rejects.append("failed to check signature on %s." % (sig_filename))
    if "NO_PUBKEY" in keywords:
        args = keywords["NO_PUBKEY"]
        # Guard against a malformed status line with no key id; previously
        # 'key' could be referenced unbound here.
        key = "UNKNOWN"
        if len(args) >= 1:
            key = args[0]
        rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if "BADARMOR" in keywords:
        rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if "NODATA" in keywords:
        rejects.append("no signature found in %s." % (sig_filename))
    if "EXPKEYSIG" in keywords:
        args = keywords["EXPKEYSIG"]
        key = "UNKNOWN"
        if len(args) >= 1:
            key = args[0]
        rejects.append("Signature made by expired key 0x%s" % (key))
    if "KEYEXPIRED" in keywords and "GOODSIG" not in keywords:
        args = keywords["KEYEXPIRED"]
        expiredate = ""
        if len(args) >= 1:
            timestamp = args[0]
            if timestamp.count("T") == 0:
                # Bare value: presumably an epoch timestamp; render it as a
                # human-readable date, falling back to the raw token
                try:
                    expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
                except ValueError:
                    expiredate = "unknown (%s)" % (timestamp)
            else:
                # Already an ISO8601-style date string
                expiredate = timestamp
        rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))

    if len(rejects) > 0:
        return (None, rejects)

    # Next check gpgv exited with a zero return code
    if exit_status:
        rejects.append("gpgv failed while checking %s." % (sig_filename))
        # Prefer the status-fd output for diagnostics; fall back to stdout
        if status.strip():
            rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "))
        else:
            rejects.append(prefix_multi_line_string(output, " [GPG output:] "))
        return (None, rejects)

    # Sanity check the good stuff we expect
    fingerprint = None
    if "VALIDSIG" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
    else:
        args = keywords["VALIDSIG"]
        if len(args) < 1:
            rejects.append("internal error while checking signature on %s." % (sig_filename))
        else:
            fingerprint = args[0]
    if "GOODSIG" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if "SIG_ID" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

    # Finally ensure there's not something we don't recognise
    known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
                          NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")

    for keyword in keywords.keys():
        if keyword not in known_keywords:
            rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))

    if len(rejects) > 0:
        return (None, rejects)

    return (fingerprint, [])
1369 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """
    Retrieve email addresses from gpg key uids for a given fingerprint.

    Results are memoized in the module-level key_uid_email_cache, so gpg
    is only invoked once per fingerprint.

    @rtype: set
    @return: set of email addresses (empty if gpg fails or no uids match)
    """
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses is not None:
        # Cache hit - avoid spawning gpg again
        return addresses
    addresses = set()
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
                % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    # Only parse the output if gpg succeeded; otherwise cache an empty set
    if result == 0:
        for l in output.split('\n'):
            m = re_gpg_uid.match(l)
            if m:
                addresses.add(m.group(1))
    key_uid_email_cache[fingerprint] = addresses
    return addresses
1388 ################################################################################
1390 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """
    Greedily word-wrap 'paragraph' so no assembled line exceeds
    'max_length' characters; continuation lines are preceded by 'prefix'.
    Words longer than 'max_length' are placed on a line of their own
    rather than being split.

    @rtype: string
    @return: the wrapped text (no trailing newline)
    """
    line = ""
    s = ""
    have_started = 0
    words = paragraph.split()

    for word in words:
        word_size = len(word)
        if word_size > max_length:
            # Oversized word: flush any pending line, then emit the word
            # on its own line unbroken
            if have_started:
                s += line + '\n' + prefix
            s += word + '\n' + prefix
            line = ""
            have_started = 0
        else:
            if have_started:
                # +1 accounts for the joining space
                new_length = len(line) + word_size + 1
                if new_length > max_length:
                    s += line + '\n' + prefix
                    line = word
                else:
                    line += ' ' + word
            else:
                line = word
                have_started = 1

    # Flush the final partial line, if any
    if have_started:
        s += line

    return s
1421 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
    Returns fixed 'src'
    """
    # Strip the leading 'root' component from both paths (first occurrence only)
    rel_src = src.replace(root, '', 1)
    rel_dest_dir = os.path.dirname(dest.replace(root, '', 1))
    # One "../" per path component of the destination's directory
    depth = len(rel_dest_dir.split('/'))
    return '../' * depth + rel_src
1434 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.
    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pair (fd, name).
    """
    # mkstemp already creates the file securely; just hand back its result
    (fd, pathname) = tempfile.mkstemp(suffix, prefix, directory)
    return (fd, pathname)
1448 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Return a secure and unique directory by pre-creating it.
    If 'parent' is non-null, it will be the directory the directory is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pathname to the new directory
    """
    # mkdtemp creates the directory with mode 0700 and a unique name
    new_dir = tempfile.mkdtemp(suffix, prefix, parent)
    return new_dir
1462 ################################################################################
def is_email_alias(email):
    """ checks if the user part of the email is listed in the alias file """
    # BUGFIX: without the 'global' declaration the assignment below would
    # create a local, so the module-level cache would never be populated
    # and the alias file would be re-read (and the add() calls would hit an
    # uninitialised cache) on every call.
    global alias_cache
    if alias_cache is None:
        alias_cache = set()
        aliasfn = which_alias_file()
        if aliasfn:
            # Alias file format is "name: target ..."; only the name matters
            for l in open(aliasfn):
                alias_cache.add(l.split(':')[0])
    uid = email.split('@')[0]
    return uid in alias_cache
1476 ################################################################################
def get_changes_files(dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place)

    Returns a list of filenames
    """
    try:
        # Much of the rest of p-u/p-a depends on being in the right place
        os.chdir(dir)
        changes_files = [x for x in os.listdir(dir) if x.endswith('.changes')]
    except OSError as e:
        fubar("Failed to read list from directory %s (%s)" % (dir, e))

    return changes_files
1495 ################################################################################
# Module-level bootstrap: build the global apt_pkg configuration object
# ('Cnf') that the functions above read their settings from.
Cnf = apt_pkg.newConfiguration()
# Load the site defaults first (default_config is defined near the top of
# this module as /etc/dak/dak.conf)
apt_pkg.ReadConfigFileISC(Cnf,default_config)

# If a host-specific config file is in effect, overlay it on top of the
# defaults so its values take precedence.
# NOTE(review): which_conf_file() is defined elsewhere in this module;
# presumably it honours an environment/option override - confirm there.
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1505 ###############################################################################