2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
40 import email as modemail
43 from dbconn import DBConn, get_architecture, get_component, get_suite
44 from dak_exceptions import *
45 from textutils import fix_maintainer
46 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
47 re_multi_line_field, re_srchasver, re_taint_free, \
48 re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource, \
51 from formats import parse_format, validate_changes_format
52 from srcformats import get_format_from_string
53 from collections import defaultdict
55 ################################################################################
# Module-level configuration defaults and caches ("#:" comments are epydoc field docs).
default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used

alias_cache = None #: Cache for email alias checks
key_uid_email_cache = {} #: Cache for email addresses from gpg key uids

# (hashname, function, earliest_changes_version)
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
                ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
# Monkeypatch commands.getstatusoutput as it may not return the correct exit
# code in lenny's Python. This also affects commands.getoutput and
# (rest of the original comment is not visible in this excerpt).
def dak_getstatusoutput(cmd):
    """Run *cmd* through the shell and capture stdout+stderr combined.

    Drop-in replacement for commands.getstatusoutput(); installed as the
    monkeypatch below.  Several body lines are elided in this excerpt.
    """
    pipe = subprocess.Popen(cmd, shell=True, universal_newlines=True,
                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

    output = "".join(pipe.stdout.readlines())

    # Presumably strips the single trailing newline, mirroring
    # commands.getstatusoutput — the conditional's body is elided here.
    if output[-1:] == '\n':

commands.getstatusoutput = dak_getstatusoutput
86 ################################################################################
    # Body of html_escape(s) — the "def" line is not visible in this excerpt.
    # Replaces every character matched by re_html_escaping with its mapping
    # from the html_escaping dict (both imported from regexes above).
    """ Escape html chars """
    return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
92 ################################################################################
def open_file(filename, mode='r'):
    """
    Open C{file}, return fileobject.

    @type filename: string
    @param filename: path/filename to open

    @type mode: string
    @param mode: open mode

    @rtype: fileobject
    @return: open fileobject

    @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.

    """
    # NOTE(review): the try/except IOError wrapper around these two lines is
    # elided in this excerpt (the docstring's @raise documents the intent).
        f = open(filename, mode)
        raise CantOpenError, filename
116 ################################################################################
def our_raw_input(prompt=""):
    """Prompted-input helper; parts of the body are elided in this excerpt."""
        sys.stdout.write(prompt)
    # Written on end-of-input (^D); surrounding handler is elided here.
        sys.stderr.write("\nUser interrupt (^D).\n")
129 ################################################################################
def extract_component_from_section(section):
    """Split a Section value such as "contrib/net" into (section, component).

    Several lines of the body are elided in this excerpt.
    """
    # A '/' means the component is given explicitly as the first path element.
    if section.find('/') != -1:
        component = section.split('/')[0]

    # Expand default component
    if Cnf.has_key("Component::%s" % section):

    return (section, component)
146 ################################################################################
def parse_deb822(contents, signing_rules=0):
    """Parse deb822-style control data (.changes/.dsc text) into a field dict.

    See parse_changes() for the meaning of signing_rules.  Many lines of
    the implementation are elided in this excerpt; gaps are not marked
    individually.
    """
    # Split the lines in the input, keeping the linebreaks.
    lines = contents.splitlines(True)

        raise ParseChangesError, "[Empty changes file]"

    # Reindex by line number so we can easily verify the format of
    # the file (the loop header is elided); note line[:-1] drops the newline.
        indexed_lines[index] = line[:-1]

    num_of_lines = len(indexed_lines.keys())

    while index < num_of_lines:
        line = indexed_lines[index]
            # Strict .dsc mode: a blank line must be followed by the
            # PGP signature block, otherwise the .dsc is malformed.
            if signing_rules == 1:
                if index > num_of_lines:
                    raise InvalidDscError, index
                line = indexed_lines[index]
                if not line.startswith("-----BEGIN PGP SIGNATURE"):
                    raise InvalidDscError, index
        if line.startswith("-----BEGIN PGP SIGNATURE"):
        if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
            # In strict mode, skip the PGP armor header lines up to the
            # first blank line.
            if signing_rules == 1:
                while index < num_of_lines and line != "":
                    line = indexed_lines[index]
        # If we're not inside the signed data, don't process anything
        if signing_rules >= 0 and not inside_signature:
        # Single-line field: "Field: value".
        slf = re_single_line_field.match(line)
            field = slf.groups()[0].lower()
            changes[field] = slf.groups()[1]
            # A lone " ." continues a multi-line field with a blank line.
            changes[field] += '\n'
        # Multi-line field continuation.
        mlf = re_multi_line_field.match(line)
                raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
            if first == 1 and changes[field] != "":
                changes[field] += '\n'
            changes[field] += mlf.groups()[0] + '\n'

    if signing_rules == 1 and inside_signature:
        raise InvalidDscError, index

    # Keep the verbatim input around for callers that need it.
    changes["filecontents"] = "".join(lines)

    if changes.has_key("source"):
        # Strip the source version in brackets from the source field,
        # put it in the "source-version" field instead.
        srcver = re_srchasver.search(changes["source"])
            changes["source"] = srcver.group(1)
            changes["source-version"] = srcver.group(2)

        raise ParseChangesError, error
236 ################################################################################
def parse_changes(filename, signing_rules=0):
    """
    Parses a changes file and returns a dictionary where each field is a
    key.  The mandatory first argument is the filename of the .changes
    file.

    signing_rules is an optional argument:

      - If signing_rules == -1, no signature is required.
      - If signing_rules == 0 (the default), a signature is required.
      - If signing_rules == 1, it turns on the same strict format checking
        as dpkg-source.

    The rules for (signing_rules == 1)-mode are:

      - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
        followed by any PGP header data and must end with a blank line.

      - The data section must end with a blank line and must be followed by
        "-----BEGIN PGP SIGNATURE-----".

    @raise ChangesUnicodeError: if the file content is not valid UTF-8.
    """
    changes_in = open_file(filename)
    content = changes_in.read()
    # Validate UTF-8 before parsing (try/except wrapper elided in excerpt).
        unicode(content, 'utf-8')
        raise ChangesUnicodeError, "Changes file not proper utf-8"
    return parse_deb822(content, signing_rules)
269 ################################################################################
def hash_key(hashname):
    """Return the files-dict key used to store the given hash (e.g. "md5" -> "md5sum")."""
    return '{0}sum'.format(hashname)
274 ################################################################################
def create_hash(where, files, hashname, hashfunc):
    """
    create_hash extends the passed files dict with the given hash by
    iterating over all files on disk and passing them to the hashing
    function given.  Unopenable files are reported via rejmsg.
    """
    for f in files.keys():
        # (enclosing try: line is elided in this excerpt)
            file_handle = open_file(f)
        except CantOpenError:
            rejmsg.append("Could not open file %s for checksumming" % (f))

        # Store the computed digest under e.g. "sha1sum".
        files[f][hash_key(hashname)] = hashfunc(file_handle)
296 ################################################################################
def check_hash(where, files, hashname, hashfunc):
    """
    check_hash checks the given hash in the files dict against the actual
    files on disk.  The hash values need to be present consistently in
    all file entries.  It does not modify its input in any way.
    """
    for f in files.keys():
        # (try: wrapper elided in this excerpt)
            file_handle = open_file(f)

            # Check for the hash entry, to not trigger a KeyError.
            if not files[f].has_key(hash_key(hashname)):
                rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
                    where))

            # Actually check the hash for correctness.
            if hashfunc(file_handle) != files[f][hash_key(hashname)]:
                rejmsg.append("%s: %s check failed in %s" % (f, hashname,
                    where))
        except CantOpenError:
            # TODO: This happens when the file is in the pool.
            # warn("Cannot open file %s" % f)
331 ################################################################################
def check_size(where, files):
    """
    check_size checks the file sizes in the passed files dict against the
    actual files on disk (via os.stat; the stat call is elided here).
    """
    for f in files.keys():
        # (os.stat try/except elided)
            # TODO: This happens when the file is in the pool.

        # Compare the on-disk size with the declared "size" field.
        actual_size = entry[stat.ST_SIZE]
        size = int(files[f]["size"])
        if size != actual_size:
            rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
                          % (f, actual_size, size, where))
356 ################################################################################
def check_dsc_files(dsc_filename, dsc=None, dsc_files=None):
    """
    Verify that the files listed in the Files field of the .dsc are
    those expected given the announced Format.

    @type dsc_filename: string
    @param dsc_filename: path of .dsc file

    @type dsc: dict
    @param dsc: the content of the .dsc parsed by C{parse_changes()}

    @type dsc_files: dict
    @param dsc_files: the file list returned by C{build_file_list()}

    @rtype: list
    @return: all errors detected
    """
    # Parse the file if needed
        dsc = parse_changes(dsc_filename, signing_rules=1);

    if dsc_files is None:
        dsc_files = build_file_list(dsc, is_a_dsc=1)

    # Ensure .dsc lists proper set of source files according to the format
    # announced.  'has' counts occurrences per logical file type.
    has = defaultdict(lambda: 0)

    # Suffix-regex -> keys of 'has' to bump.  NOTE(review): the first three
    # patterns use unescaped dots (match any char), unlike the later,
    # escaped ones — confirm this looseness is intentional.
    # (the "ftype_lookup = (" assignment line is elided in this excerpt)
        (r'orig.tar.gz',             ('orig_tar_gz', 'orig_tar')),
        (r'diff.gz',                 ('debian_diff',)),
        (r'tar.gz',                  ('native_tar_gz', 'native_tar')),
        (r'debian\.tar\.(gz|bz2)',   ('debian_tar',)),
        (r'orig\.tar\.(gz|bz2)',     ('orig_tar',)),
        (r'tar\.(gz|bz2)',           ('native_tar',)),
        (r'orig-.+\.tar\.(gz|bz2)',  ('more_orig_tar',)),

    for f in dsc_files.keys():
        m = re_issource.match(f)
            rejmsg.append("%s: %s in Files field not recognised as source."

        # Populate 'has' dictionary by resolving keys in lookup table
        for regex, keys in ftype_lookup:
            if re.match(regex, m.group(3)):

        # File does not match anything in lookup table; reject
        # NOTE(review): 'reject' is not defined in this excerpt; sibling
        # paths append to rejmsg — verify against the full source.
            reject("%s: unexpected source file '%s'" % (dsc_filename, f))

    # Check for multiple files
    for file_type in ('orig_tar', 'native_tar', 'debian_tar', 'debian_diff'):
        if has[file_type] > 1:
            rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))

    # Source format specific tests
        format = get_format_from_string(dsc['format'])
            '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)

    except UnknownFormatError:
        # Not an error here for now
436 ################################################################################
def check_hash_fields(what, manifest):
    """
    check_hash_fields ensures that there are no checksum fields in the
    given dict that we do not know about (i.e. not listed in known_hashes).
    """
    hashes = map(lambda x: x[0], known_hashes)
    for field in manifest:
        # Checksum fields are named "checksums-<hashname>".
        if field.startswith("checksums-"):
            hashname = field.split("-",1)[1]
            if hashname not in hashes:
                rejmsg.append("Unsupported checksum field for %s "\
                    "in %s" % (hashname, what))
454 ################################################################################
def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
    """Validate one hash type for a .changes: parse declared checksums when
    the changes Format is new enough, otherwise recompute from disk."""
    if format >= version:
        # The version should contain the specified hash.

        # Import hashes from the changes
        rejmsg = parse_checksums(".changes", files, changes, hashname)

        # We need to calculate the hash because it can't possibly
        # be declared by an older-format .changes.
        return func(".changes", files, hashname, hashfunc)
# We could add the orig which might be in the pool to the files dict to
# access the checksums easily.

def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
    """
    ensure_dsc_hashes' task is to ensure that each and every *present* hash
    in the dsc is correct, i.e. identical to the changes file and if necessary
    the pool. The latter task is delegated to check_hash.
    """
    # Skip hash types the .dsc does not declare at all.
    if not dsc.has_key('Checksums-%s' % (hashname,)):
    # Import hashes from the dsc
    parse_checksums(".dsc", dsc_files, dsc, hashname)
    rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
490 ################################################################################
def parse_checksums(where, files, manifest, hashname):
    """Parse a "checksums-<hashname>" field ("checksum size filename" per
    line) into the files dict, cross-checking sizes and coverage."""
    field = 'checksums-%s' % hashname
    if not field in manifest:
    for line in manifest[field].split('\n'):
        clist = line.strip().split(' ')
        # (try/except around the unpack is elided in this excerpt)
            checksum, size, checkfile = clist
            rejmsg.append("Cannot parse checksum line [%s]" % (line))
        if not files.has_key(checkfile):
            # TODO: check for the file's entry in the original files dict, not
            # the one modified by (auto)byhand and other weird stuff
            #             rejmsg.append("%s: not present in files but in checksums-%s in %s" %
            #                 (file, hashname, where))
        # NOTE: string comparison — both sides are the textual size fields.
        if not files[checkfile]["size"] == size:
            rejmsg.append("%s: size differs for files and checksums-%s entry "\
                "in %s" % (checkfile, hashname, where))
        files[checkfile][hash_key(hashname)] = checksum
    # Ensure every file received an entry for this hash.
    for f in files.keys():
        if not files[f].has_key(hash_key(hashname)):
            # NOTE(review): this message interpolates 'checkfile' (leftover
            # from the previous loop) instead of 'f' — looks like a bug;
            # confirm against the full source.
            rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
523 ################################################################################
# Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl

def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
    """Turn the given whitespace-separated Files-style field into a dict
    keyed by filename; several lines are elided in this excerpt."""
    # Make sure we have a Files: field to parse...
    if not changes.has_key(field):
        raise NoFilesFieldError

    # Validate .changes Format: field
        validate_changes_format(parse_format(changes['format']), field)

    # .changes "files" entries carry section/priority; .dsc entries do not.
    includes_section = (not is_a_dsc) and field == "files"

    # Parse each entry/line:
    for i in changes[field].split('\n'):
        section = priority = ""
        # 5-tuple form (with section/priority) vs 3-tuple form:
            (md5, size, section, priority, name) = s
            (md5, size, name) = s
            raise ParseChangesError, i

            (section, component) = extract_component_from_section(section)

        files[name] = dict(size=size, section=section,
                           priority=priority, component=component)
        files[name][hashname] = md5
567 ################################################################################
def send_mail (message, filename=""):
    """sendmail wrapper, takes _either_ a message string or a file as arguments"""

    # If we've been passed a string dump it into a temporary file
        (fd, filename) = tempfile.mkstemp()
        os.write (fd, message)

    # If a whitelist is configured, filter To/Bcc/Cc down to addresses
    # that match it; many lines of this section are elided in this excerpt.
    if Cnf.has_key("Dinstall::MailWhiteList") and \
       Cnf["Dinstall::MailWhiteList"] != "":
        message_in = open_file(filename)
        message_raw = modemail.message_from_file(message_in)

        # Load the whitelist: lines are either /regex/ markers or literals.
        whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
            for line in whitelist_in:
                if not re_whitespace_comment.match(line):
                    if re_re_mark.match(line):
                        whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
                        whitelist.append(re.compile(re.escape(line.strip())))

        # Fields to check.
        fields = ["To", "Bcc", "Cc"]
                value = message_raw.get(field, None)
                    for item in value.split(","):
                        (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
                        if not mail_whitelisted:
                            print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])

                # Doesn't have any mail in whitelist so remove the header
                    del message_raw[field]
                    message_raw.replace_header(field, ', '.join(match))

        # Change message fields in order if we don't have a To header
        if not message_raw.has_key("To"):
                if message_raw.has_key(field):
                    message_raw[fields[-1]] = message_raw[field]
                    del message_raw[field]
                # Clean up any temporary files
                # and return, as we removed all recipients.
                    os.unlink (filename);

        # Rewrite the (existing) temp file with the filtered message.
        fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
        os.write (fd, message_raw.as_string(True));

    # Invoke sendmail
    (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
        raise SendmailFailedError, output

    # Clean up any temporary files
649 ################################################################################
def poolify (source, component):
    """Return the pool path fragment for *source* within *component*
    (lib* sources use a four-character prefix directory, others one char).

    A couple of lines are elided in this excerpt.
    """
    if source[:3] == "lib":
        return component + source[:4] + '/' + source + '/'
    return component + source[:1] + '/' + source + '/'
659 ################################################################################
def move (src, dest, overwrite = 0, perms = 0664):
    """Copy src to dest with the given perms; several lines (including the
    removal of src that makes this a move) are elided in this excerpt."""
    if os.path.exists(dest) and os.path.isdir(dest):
        # (else-branch line; the if-branch is elided)
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        # NOTE(review): the matching os.umask(umask) restore is not visible
        # in this excerpt — confirm the umask is put back.
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Moving %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
            fubar("Can't move %s to %s - file already exists." % (src, dest))
            if not os.access(dest, os.W_OK):
                fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
def copy (src, dest, overwrite = 0, perms = 0664):
    """Like move() but raises FileExistsError/CantOverwriteError instead of
    fubar()-ing; several lines are elided in this excerpt."""
    if os.path.exists(dest) and os.path.isdir(dest):
        # (else-branch line; the if-branch is elided)
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        # NOTE(review): the matching os.umask(umask) restore is not visible
        # in this excerpt — confirm the umask is put back.
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Copying %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
            raise FileExistsError
            if not os.access(dest, os.W_OK):
                raise CantOverwriteError
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
706 ################################################################################
    # Body of a host-lookup helper (its "def" line is not visible in this
    # excerpt): returns the per-host DatabaseHostname config value, if set.
    res = socket.gethostbyaddr(socket.gethostname())
    database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
    if database_hostname:
        return database_hostname
def which_conf_file ():
    """Locate the dak config file: $DAK_CONFIG, an allowed per-host local
    config, the host's configured DakConfig, or the built-in default."""
    if os.getenv('DAK_CONFIG'):
        return os.getenv('DAK_CONFIG')

    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # NOTE(review): os.path.join discards homedir here because the second
        # component is absolute — confpath is always "/etc/dak.conf".
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            # NOTE(review): reads default_config, not confpath — confirm.
            apt_pkg.ReadConfigFileISC(Cnf,default_config)

    # We are still in here, so there is no local config file or we do
    # not allow local files. Do the normal stuff.
    if Cnf.get("Config::" + res[0] + "::DakConfig"):
        return Cnf["Config::" + res[0] + "::DakConfig"]

    return default_config
def which_apt_conf_file ():
    """Locate the apt config file analogously to which_conf_file()."""
    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # NOTE(review): same issues as which_conf_file() — the absolute
        # second arg makes confpath "/etc/dak.conf", and the read targets
        # default_config rather than confpath.
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            apt_pkg.ReadConfigFileISC(Cnf,default_config)

    if Cnf.get("Config::" + res[0] + "::AptConfig"):
        return Cnf["Config::" + res[0] + "::AptConfig"]

    return default_apt_config
def which_alias_file():
    """Return the per-host forward-alias file path; the branch bodies are
    elided in this excerpt."""
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
    if os.path.exists(aliasfn):
757 ################################################################################
def TemplateSubst(subst_map, filename):
    """Perform a substitution on a template: replace each key of subst_map
    occurring in the template file's text with str(value).

    The trailing return of the substituted text is elided in this excerpt.
    """
    templatefile = open_file(filename)
    template = templatefile.read()
    for k, v in subst_map.iteritems():
        template = template.replace(k, str(v))
768 ################################################################################
def fubar(msg, exit_code=1):
    """Print an error to stderr; the terminating sys.exit call is elided."""
    sys.stderr.write("E: %s\n" % (msg))

    # Body line of warn(msg) — its "def" line is not visible in this excerpt.
    sys.stderr.write("W: %s\n" % (msg))
777 ################################################################################
# Returns the user name with a laughable attempt at rfc822 conformancy
# (read: removing stray periods).
# Body line of the full-name helper (def line not visible in this excerpt):
# takes the GECOS field, keeps the part before the first comma, drops dots.
    return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')

# Body line of a login-name helper (def line not visible in this excerpt).
    return pwd.getpwuid(os.getuid())[0]
787 ################################################################################
797 return ("%d%s" % (c, t))
799 ################################################################################
def cc_fix_changes (changes):
    """Convert the space-separated "architecture" field into a dict keyed
    by architecture name (values are 1); some lines are elided here."""
    o = changes.get("architecture", "")
        del changes["architecture"]
    changes["architecture"] = {}
        changes["architecture"][j] = 1
def changes_compare (a, b):
    """ Sort by source name, source version, 'have source', and then by filename """
    # Parse both .changes files (the try/except wrappers that rank an
    # unparsable file are elided in this excerpt).
        a_changes = parse_changes(a)

        b_changes = parse_changes(b)

    cc_fix_changes (a_changes)
    cc_fix_changes (b_changes)

    # Sort by source name
    a_source = a_changes.get("source")
    b_source = b_changes.get("source")
    q = cmp (a_source, b_source)

    # Sort by source version
    a_version = a_changes.get("version", "0")
    b_version = b_changes.get("version", "0")
    q = apt_pkg.VersionCompare(a_version, b_version)

    # Sort by 'have source'
    a_has_source = a_changes["architecture"].get("source")
    b_has_source = b_changes["architecture"].get("source")
    if a_has_source and not b_has_source:
    elif b_has_source and not a_has_source:

    # Fall back to sort by filename
849 ################################################################################
def find_next_free (dest, too_many=100):
    """Return a free filename by appending '.<n>' suffixes to dest, giving
    up after too_many attempts; initialisation lines are elided here."""
    while os.path.exists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
    if extra >= too_many:
        raise NoFreeFilenameError
861 ################################################################################
def result_join (original, sep = '\t'):
    """Join a sequence with sep, rendering None entries as empty strings;
    a couple of lines are elided in this excerpt."""
    for i in xrange(len(original)):
        if original[i] == None:
            resultlist.append("")
            resultlist.append(original[i])
    return sep.join(resultlist)
872 ################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    """Prefix each line of *str* with *prefix*, optionally keeping blank
    lines; the accumulator setup and final return are elided here."""
    for line in str.split('\n'):
        if line or include_blank_lines:
            out += "%s%s\n" % (prefix, line)
    # Strip trailing new line
885 ################################################################################
def validate_changes_file_arg(filename, require_changes=1):
    """
    'filename' is either a .changes or .dak file.  If 'filename' is a
    .dak file, it's changed to be the corresponding .changes file.  The
    function then checks if the .changes file a) exists and b) is
    readable and returns the .changes filename if so.  If there's a
    problem, the next action depends on the option 'require_changes'
    argument:

      - If 'require_changes' == -1, errors are ignored and the .changes
        filename is returned.
      - If 'require_changes' == 0, a warning is given and 'None' is returned.
      - If 'require_changes' == 1, a fatal error is raised.

    """
    orig_filename = filename
    if filename.endswith(".dak"):
        filename = filename[:-4]+".changes"

    if not filename.endswith(".changes"):
        error = "invalid file type; not a changes file"
        if not os.access(filename,os.R_OK):
            if os.path.exists(filename):
                error = "permission denied"
                error = "file not found"

    # Error dispatch per require_changes (see docstring above).
        if require_changes == 1:
            fubar("%s: %s." % (orig_filename, error))
        elif require_changes == 0:
            warn("Skipping %s - %s" % (orig_filename, error))
        else: # We only care about the .dak file
928 ################################################################################
931 return (arch != "source" and arch != "all")
933 ################################################################################
def join_with_commas_and(list):
    """Render a list as English prose: "nothing", "a", "a and b", "a, b and c"."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    return "%s and %s" % (", ".join(list[:-1]), list[-1])
940 ################################################################################
    # Body fragment of a dependency pretty-printer (its "def" and loop
    # headers are not visible in this excerpt): each atom is a
    # (package, version, constraint) triple, rendered as
    # "pkg (constraint version)" and joined with " |".
        (pkg, version, constraint) = atom
            pp_dep = "%s (%s %s)" % (pkg, constraint, version)
        pp_deps.append(pp_dep)
    return " |".join(pp_deps)
953 ################################################################################
958 ################################################################################
def parse_args(Options):
    """ Handle -a, -c and -s arguments; returns them as SQL constraints """
    # XXX: This should go away and everything which calls it be converted
    #      to use SQLA properly.  For now, we'll just fix it not to use
    #      the old Pg interface though
    session = DBConn().session()

    # Process suite (the surrounding if/else lines are elided here).
        for suitename in split_args(Options["Suite"]):
            suite = get_suite(suitename, session=session)
            # NOTE(review): if get_suite() returns None for an unknown
            # suite, this attribute access would raise — confirm.
            if suite.suite_id is None:
                warn("suite '%s' not recognised." % (suite.suite_name))
                suite_ids_list.append(suite.suite_id)
            con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
            fubar("No valid suite given.")

    # Process component
    if Options["Component"]:
        component_ids_list = []
        for componentname in split_args(Options["Component"]):
            component = get_component(componentname, session=session)
            if component is None:
                warn("component '%s' not recognised." % (componentname))
                component_ids_list.append(component.component_id)
        if component_ids_list:
            con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
            fubar("No valid component given.")

    # Process architecture
    con_architectures = ""
    if Options["Architecture"]:
        for archname in split_args(Options["Architecture"]):
            # "source" is handled via check_source, not an arch id.
            if archname == "source":
                arch = get_architecture(archname, session=session)
                    warn("architecture '%s' not recognised." % (archname))
                    arch_ids_list.append(arch.arch_id)
            con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
            if not check_source:
                fubar("No valid architecture given.")

    return (con_suites, con_architectures, con_components, check_source)
1022 ################################################################################
# Inspired(tm) by Bryn Keller's print_exc_plus (See
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
# Body fragments of an extended-traceback printer (its "def" line and the
# frame-walking loop headers are not visible in this excerpt): walks to the
# innermost frame, prints the traceback, then dumps each frame's locals.
    tb = sys.exc_info()[2]
        frame = frame.f_back
    traceback.print_exc()
        print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
                                               frame.f_code.co_filename,
        for key, value in frame.f_locals.items():
            print "\t%20s = " % key,
                # Some objects raise from repr()/str(); keep going.
                print "<unable to print>"
1049 ################################################################################
1051 def try_with_debug(function):
1059 ################################################################################
def arch_compare_sw (a, b):
    """
    Function for use in sorting lists of architectures.

    Sorts normally except that 'source' dominates all others.

    (Most of the body is elided in this excerpt.)
    """
    if a == "source" and b == "source":
1077 ################################################################################
def split_args (s, dwim=1):
    """
    Split command line arguments which can be separated by either commas
    or whitespace.  If dwim is set, it will complain about string ending
    in comma since this usually means someone did 'dak ls -a i386, m68k
    foo' or something and the inevitable confusion resulting from 'm68k'
    being treated as an argument is undesirable.
    """
    # No comma: split on whitespace (the return lines are elided here).
    if s.find(",") == -1:
        if s[-1:] == "," and dwim:
            fubar("split_args: found trailing comma, spurious space maybe?")
1095 ################################################################################
def gpgv_get_status_output(cmd, status_read, status_write):
    """
    Our very own version of commands.getouputstatus(), hacked to support
    gpgv's status fd.  Many lines (fork, fd bookkeeping, the read loop
    headers) are elided in this excerpt.
    """
    cmd = ['/bin/sh', '-c', cmd]
    p2cread, p2cwrite = os.pipe()
    c2pread, c2pwrite = os.pipe()
    errout, errin = os.pipe()

    # Child: close every fd except the status fd before exec'ing.
            for i in range(3, 256):
                if i != status_write:
            os.execvp(cmd[0], cmd)

    # Parent side.
    os.dup2(c2pread, c2pwrite)
    os.dup2(errout, errin)

    output = status = ""
        i, o, e = select.select([c2pwrite, errin, status_read], [], [])
            r = os.read(fd, 8196)
                more_data.append(fd)
                # Route data: child stdout/stderr -> output, status fd -> status.
                if fd == c2pwrite or fd == errin:
                elif fd == status_read:
                    fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))

    pid, exit_status = os.waitpid(pid, 0)

    os.close(status_write)
    os.close(status_read)

    return output, status, exit_status
1162 ################################################################################
def process_gpgv_output(status):
    """Parse gpgv --status-fd output into ({keyword: args}, error_string)."""
    # Process the status-fd output
    for line in status.split('\n'):
        split = line.split()
            internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
        # These keywords may legitimately repeat; any other duplicate is an error.
        if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
        keywords[keyword] = args

    return (keywords, internal_error)
1189 ################################################################################
def retrieve_key (filename, keyserver=None, keyring=None):
    """
    Retrieve the key that signed 'filename' from 'keyserver' and
    add it to 'keyring'.  Returns nothing on success, or an error message
    on failure.  Several lines are elided in this excerpt.
    """
    # Defaults for keyserver and keyring
        keyserver = Cnf["Dinstall::KeyServer"]
        keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(filename):
        return "%s: tainted filename" % (filename)

    # Invoke gpgv on the file
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
    (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)
        return internal_error

    # A missing-key signature is exactly what we expect here.
    if not keywords.has_key("NO_PUBKEY"):
        return "didn't find expected NO_PUBKEY in gpgv status-fd output"

    fingerprint = keywords["NO_PUBKEY"][0]

    # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
    # it'll try to create a lockfile in /dev. A better solution might
    # be a tempfile or something.
    cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
          % (Cnf["Dinstall::SigningKeyring"])
    cmd += " --keyring %s --keyserver %s --recv-key %s" \
           % (keyring, keyserver, fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
        return "'%s' failed with exit code %s" % (cmd, result)
1235 ################################################################################
def gpg_keyring_args(keyrings=None):
    """Render a list of keyrings as gpg "--keyring <path>" arguments,
    defaulting to the configured ones (the guard line is elided here)."""
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    return " ".join(["--keyring %s" % x for x in keyrings])
1243 ################################################################################
def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None):
    """
    Check the signature of a file and return the fingerprint if the
    signature is valid or 'None' if it's not.  The first argument is the
    filename whose signature should be checked.  The second argument is a
    reject function and is called when an error is found.  The reject()
    function must allow for two arguments: the first is the error message,
    the second is an optional prefix string.  It's possible for reject()
    to be called more than once during an invocation of check_signature().
    The third argument is optional and is the name of the files the
    detached signature applies to.  The fourth argument is optional and is
    a *list* of keyrings to use.  'autofetch' can either be None, True or
    False.  If None, the default behaviour specified in the config will be
    used.

    @rtype: tuple
    @return: (fingerprint, []) on success, or (None, [reject messages])
        when the signature could not be validated.
    """

    rejects = []

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(sig_filename):
        rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
        return (None, rejects)

    if data_filename and not re_taint_free.match(data_filename):
        rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
        return (None, rejects)

    if not keyrings:
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    # Autofetch the signing key if that's enabled
    if autofetch is None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    if autofetch:
        error_msg = retrieve_key(sig_filename)
        if error_msg:
            rejects.append(error_msg)
            return (None, rejects)

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    if internal_error:
        rejects.append("internal error while performing signature check on %s." % (sig_filename))
        # BUG FIX: list.append() takes exactly one argument; the extra ""
        # prefix argument was a leftover from the old reject() callback
        # API and raised a TypeError on this error path.
        rejects.append(internal_error)
        rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.")
        return (None, rejects)

    # Now check for obviously bad things in the processed output
    if keywords.has_key("KEYREVOKED"):
        rejects.append("The key used to sign %s has been revoked." % (sig_filename))
    if keywords.has_key("BADSIG"):
        rejects.append("bad signature on %s." % (sig_filename))
    if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
        rejects.append("failed to check signature on %s." % (sig_filename))
    if keywords.has_key("NO_PUBKEY"):
        args = keywords["NO_PUBKEY"]
        # Guard against gpgv emitting the keyword with no key id; 'key'
        # was previously left unbound in that case.
        key = "UNKNOWN"
        if len(args) >= 1:
            key = args[0]
        rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if keywords.has_key("BADARMOR"):
        rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if keywords.has_key("NODATA"):
        rejects.append("no signature found in %s." % (sig_filename))
    if keywords.has_key("EXPKEYSIG"):
        args = keywords["EXPKEYSIG"]
        key = "UNKNOWN"
        if len(args) >= 1:
            key = args[0]
        rejects.append("Signature made by expired key 0x%s" % (key))
    if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
        args = keywords["KEYEXPIRED"]
        expiredate = ""
        if len(args) >= 1:
            timestamp = args[0]
            # KEYEXPIRED may carry either a raw epoch timestamp or an
            # ISO-8601 value containing a "T"; only the former needs to
            # be converted for display.
            if timestamp.count("T") == 0:
                try:
                    expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
                except ValueError:
                    expiredate = "unknown (%s)" % (timestamp)
            else:
                expiredate = timestamp
        rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))

    if len(rejects) > 0:
        return (None, rejects)

    # Next check gpgv exited with a zero return code
    if exit_status:
        rejects.append("gpgv failed while checking %s." % (sig_filename))
        # BUG FIX (as above): single-argument append instead of the old
        # two-argument reject() calling convention.
        if status.strip():
            rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "))
        else:
            rejects.append(prefix_multi_line_string(output, " [GPG output:] "))
        return (None, rejects)

    # Sanity check the good stuff we expect
    fingerprint = None
    if not keywords.has_key("VALIDSIG"):
        rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
    else:
        args = keywords["VALIDSIG"]
        if len(args) < 1:
            rejects.append("internal error while checking signature on %s." % (sig_filename))
        else:
            fingerprint = args[0]
    if not keywords.has_key("GOODSIG"):
        rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if not keywords.has_key("SIG_ID"):
        rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

    # Finally ensure there's not something we don't recognise
    known_keywords = dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
                          NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="",POLICY_URL="")

    for keyword in keywords.keys():
        if not known_keywords.has_key(keyword):
            rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))

    if len(rejects) > 0:
        return (None, rejects)

    return (fingerprint, [])
1377 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """
    Retrieve email addresses from gpg key uids for a given fingerprint.

    Results are memoized in the module-level key_uid_email_cache so gpg
    is invoked at most once per fingerprint.

    @rtype: set
    @return: set of email addresses (possibly empty)
    """
    addresses = key_uid_email_cache.get(fingerprint)
    # Cache hit: return the previously computed set (may be empty).
    if addresses is not None:
        return addresses
    addresses = set()
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
          % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    # Only trust the output if gpg exited successfully.
    if result == 0:
        for l in output.split('\n'):
            m = re_gpg_uid.match(l)
            if m:
                addresses.add(m.group(1))
    key_uid_email_cache[fingerprint] = addresses
    return addresses
1396 ################################################################################
1398 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """
    Greedily word-wrap 'paragraph' into lines of at most 'max_length'
    characters.  Every line after a break is preceded by 'prefix'.
    Words longer than 'max_length' are emitted on a line of their own
    rather than being split.

    @rtype: string
    @return: the wrapped text (no trailing newline unless forced by an
        oversized final word)
    """
    line = ""
    s = ""
    have_started = 0
    words = paragraph.split()

    for word in words:
        word_size = len(word)
        if word_size > max_length:
            # Oversized word: flush any pending line, then put the word
            # on its own line.
            if have_started:
                s += line + '\n' + prefix
            s += word + '\n' + prefix
            line = ""
            have_started = 0
        else:
            if have_started:
                # +1 accounts for the joining space.
                new_length = len(line) + word_size + 1
                if new_length > max_length:
                    s += line + '\n' + prefix
                    line = word
                else:
                    line += ' ' + word
            else:
                line = word
            have_started = 1

    if have_started:
        s += line

    return s
1429 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.

    @rtype: string
    @return: 'src' rewritten as a path relative to the directory that
        will contain the symlink.

    NOTE(review): str.replace(root, '', 1) strips the first occurrence of
    'root' anywhere in the path, not only a leading prefix — callers are
    expected to pass paths that actually start with 'root'.
    """
    src = src.replace(root, '', 1)
    dest = dest.replace(root, '', 1)
    dest = os.path.dirname(dest)
    # One "../" for each component of the (root-relative) link directory.
    new_src = '../' * len(dest.split('/'))
    return new_src + src
1442 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Securely pre-create a unique temporary file.

    If 'directory' is non-null, the file is created inside it.
    If 'prefix' is non-null, the filename starts with it (default "dak").
    If 'suffix' is non-null, the filename ends with it.

    @rtype: tuple
    @return: (os-level file descriptor, absolute pathname)
    """
    return tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=directory)
1456 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Securely pre-create a unique temporary directory.

    If 'parent' is non-null, the directory is created inside it.
    If 'prefix' is non-null, the directory name starts with it (default "dak").
    If 'suffix' is non-null, the directory name ends with it.

    @rtype: string
    @return: pathname of the newly created directory
    """
    return tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=parent)
1470 ################################################################################
def is_email_alias(email):
    """
    Check whether the user part of 'email' is listed in the alias file.

    The alias file is parsed once and memoized in the module-level
    alias_cache set.

    @rtype: boolean
    @return: True if the local part appears in the alias file
    """
    # BUG FIX: without the 'global' declaration the assignment below
    # created a function-local variable, so the module-level cache was
    # never populated and the file was re-read on every call.
    global alias_cache
    if alias_cache is None:
        aliasfn = which_alias_file()
        alias_cache = set()
        if aliasfn:
            # Alias file lines look like "name: target"; the local part
            # is everything before the first colon.
            for l in open(aliasfn):
                alias_cache.add(l.split(':')[0])
    uid = email.split('@')[0]
    return uid in alias_cache
1484 ################################################################################
def get_changes_files(from_dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place)

    Returns a list of filenames
    """
    try:
        # Much of the rest of p-u/p-a depends on being in the right place
        os.chdir(from_dir)
        changes_files = [x for x in os.listdir(from_dir) if x.endswith('.changes')]
    except OSError as e:
        fubar("Failed to read list from directory %s (%s)" % (from_dir, e))

    return changes_files
1503 ################################################################################
1507 Cnf = apt_pkg.newConfiguration()
1508 if not os.getenv("DAK_TEST"):
1509 apt_pkg.ReadConfigFileISC(Cnf,default_config)
1511 if which_conf_file() != default_config:
1512 apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())