2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
40 import email as modemail
43 from dbconn import DBConn, get_architecture, get_component, get_suite
44 from dak_exceptions import *
45 from textutils import fix_maintainer
46 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
47 re_multi_line_field, re_srchasver, re_taint_free, \
48 re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource, \
51 from srcformats import get_format_from_string
52 from collections import defaultdict
# Module-level configuration paths and in-process caches.
54 ################################################################################
56 default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
57 default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used
59 alias_cache = None #: Cache for email alias checks
60 key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
62 # (hashname, function, earliest_changes_version)
63 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
64 ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
66 # Monkeypatch commands.getstatusoutput as it returns a "0" exit code in
67 # all situations under lenny's Python.
# Drop-in replacement for commands.getstatusoutput(): runs `cmd` through a
# shell with stderr folded into stdout and collects the combined output.
# NOTE(review): shell=True means `cmd` must never contain untrusted input;
# callers in this module guard with re_taint_free before building commands.
69 def dak_getstatusoutput(cmd):
70 pipe = subprocess.Popen(cmd, shell=True, universal_newlines=True,
71 stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
73 output = "".join(pipe.stdout.readlines())
# Install the replacement so all later commands.getstatusoutput() calls
# in this module use the fixed version.
80 commands.getstatusoutput = dak_getstatusoutput
82 ################################################################################
# (body of html_escape(s); its def line is elided in this excerpt)
85 """ Escape html chars """
# Each character matched by re_html_escaping is replaced with its entity
# from the html_escaping map (both imported from regexes).
86 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
88 ################################################################################
90 def open_file(filename, mode='r'):
92 Open C{file}, return fileobject.
94 @type filename: string
95 @param filename: path/filename to open
98 @param mode: open mode
101 @return: open fileobject
103 @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
# Thin wrapper over the builtin open(); the IOError-to-CantOpenError
# conversion framing (try/except) is elided in this excerpt.
107 f = open(filename, mode)
109 raise CantOpenError, filename
112 ################################################################################
# raw_input() replacement: writes the prompt itself and reports ^D (EOF)
# on stderr — presumably exiting afterwards; the EOF handling and the
# actual read are elided in this excerpt.
114 def our_raw_input(prompt=""):
116 sys.stdout.write(prompt)
122 sys.stderr.write("\nUser interrupt (^D).\n")
125 ################################################################################
# Split a Section field like "contrib/net" into (section, component).
# Sections without a "/" get their component from the Component::<section>
# config tree (the default/fallback assignments are elided here).
127 def extract_component_from_section(section):
130 if section.find('/') != -1:
131 component = section.split('/')[0]
133 # Expand default component
135 if Cnf.has_key("Component::%s" % section):
140 return (section, component)
142 ################################################################################
# Parse RFC822-style deb822 data (a .changes/.dsc body) into a field dict.
# signing_rules: -1 = ignore signatures, 0 = default, 1 = strict inline-PGP
# framing checks (documented on parse_changes below). Raises
# ParseChangesError / InvalidDscError on malformed input.
144 def parse_deb822(contents, signing_rules=0):
148 # Split the lines in the input, keeping the linebreaks.
149 lines = contents.splitlines(True)
152 raise ParseChangesError, "[Empty changes file]"
154 # Reindex by line number so we can easily verify the format of
160 indexed_lines[index] = line[:-1]
164 num_of_lines = len(indexed_lines.keys())
167 while index < num_of_lines:
169 line = indexed_lines[index]
171 if signing_rules == 1:
# Strict mode: a blank line inside the signed area must be followed
# directly by the PGP signature block.
173 if index > num_of_lines:
174 raise InvalidDscError, index
175 line = indexed_lines[index]
176 if not line.startswith("-----BEGIN PGP SIGNATURE"):
177 raise InvalidDscError, index
182 if line.startswith("-----BEGIN PGP SIGNATURE"):
184 if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
186 if signing_rules == 1:
# Skip the PGP armor header lines up to the blank separator.
187 while index < num_of_lines and line != "":
189 line = indexed_lines[index]
191 # If we're not inside the signed data, don't process anything
192 if signing_rules >= 0 and not inside_signature:
194 slf = re_single_line_field.match(line)
196 field = slf.groups()[0].lower()
197 changes[field] = slf.groups()[1]
201 changes[field] += '\n'
203 mlf = re_multi_line_field.match(line)
206 raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
207 if first == 1 and changes[field] != "":
208 changes[field] += '\n'
210 changes[field] += mlf.groups()[0] + '\n'
214 if signing_rules == 1 and inside_signature:
215 raise InvalidDscError, index
# Keep the raw text around for later checksumming / rewriting.
217 changes["filecontents"] = "".join(lines)
219 if changes.has_key("source"):
220 # Strip the source version in brackets from the source field,
221 # put it in the "source-version" field instead.
222 srcver = re_srchasver.search(changes["source"])
224 changes["source"] = srcver.group(1)
225 changes["source-version"] = srcver.group(2)
228 raise ParseChangesError, error
232 ################################################################################
234 def parse_changes(filename, signing_rules=0):
236 Parses a changes file and returns a dictionary where each field is a
237 key. The mandatory first argument is the filename of the .changes
240 signing_rules is an optional argument:
242 - If signing_rules == -1, no signature is required.
243 - If signing_rules == 0 (the default), a signature is required.
244 - If signing_rules == 1, it turns on the same strict format checking
247 The rules for (signing_rules == 1)-mode are:
249 - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
250 followed by any PGP header data and must end with a blank line.
252 - The data section must end with a blank line and must be followed by
253 "-----BEGIN PGP SIGNATURE-----".
256 changes_in = open_file(filename)
257 content = changes_in.read()
# Validate that the file is proper UTF-8 before parsing; a decode
# failure is surfaced as ChangesUnicodeError (except framing elided).
260 unicode(content, 'utf-8')
262 raise ChangesUnicodeError, "Changes file not proper utf-8"
263 return parse_deb822(content, signing_rules)
265 ################################################################################
def hash_key(hashname):
    """Return the files-dict key under which `hashname`'s digest is stored
    (e.g. "md5" -> "md5sum")."""
    return "{0}sum".format(hashname)
270 ################################################################################
272 def create_hash(where, files, hashname, hashfunc):
274 create_hash extends the passed files dict with the given hash by
275 iterating over all files on disk and passing them to the hashing
# Files that cannot be opened are reported in the returned rejection
# message list instead of raising.
280 for f in files.keys():
282 file_handle = open_file(f)
283 except CantOpenError:
284 rejmsg.append("Could not open file %s for checksumming" % (f))
# Store the computed digest under e.g. files[f]["sha1sum"].
287 files[f][hash_key(hashname)] = hashfunc(file_handle)
292 ################################################################################
294 def check_hash(where, files, hashname, hashfunc):
296 check_hash checks the given hash in the files dict against the actual
297 files on disk. The hash values need to be present consistently in
298 all file entries. It does not modify its input in any way.
302 for f in files.keys():
306 file_handle = open_file(f)
308 # Check for the hash entry, to not trigger a KeyError.
309 if not files[f].has_key(hash_key(hashname)):
310 rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
314 # Actually check the hash for correctness.
315 if hashfunc(file_handle) != files[f][hash_key(hashname)]:
316 rejmsg.append("%s: %s check failed in %s" % (f, hashname,
# Unopenable files are tolerated here — per the TODO below the file
# may already live in the pool rather than the queue directory.
318 except CantOpenError:
319 # TODO: This happens when the file is in the pool.
320 # warn("Cannot open file %s" % f)
327 ################################################################################
329 def check_size(where, files):
331 check_size checks the file sizes in the passed files dict against the
# Entries whose on-disk size (via os.stat, elided here) differs from the
# recorded "size" field are reported in the returned rejection list.
336 for f in files.keys():
341 # TODO: This happens when the file is in the pool.
345 actual_size = entry[stat.ST_SIZE]
346 size = int(files[f]["size"])
347 if size != actual_size:
348 rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
349 % (f, actual_size, size, where))
352 ################################################################################
354 def check_dsc_files(dsc_filename, dsc=None, dsc_files=None):
356 Verify that the files listed in the Files field of the .dsc are
357 those expected given the announced Format.
359 @type dsc_filename: string
360 @param dsc_filename: path of .dsc file
363 @param dsc: the content of the .dsc parsed by C{parse_changes()}
365 @type dsc_files: dict
366 @param dsc_files: the file list returned by C{build_file_list()}
369 @return: all errors detected
373 # Parse the file if needed
375 dsc = parse_changes(dsc_filename, signing_rules=1);
377 if dsc_files is None:
378 dsc_files = build_file_list(dsc, is_a_dsc=1)
380 # Ensure .dsc lists proper set of source files according to the format
382 has = defaultdict(lambda: 0)
# Lookup table mapping filename-suffix regexes to the flags they set in
# `has`; matching stops at the first hit, so order matters.
385 (r'orig.tar.gz', ('orig_tar_gz', 'orig_tar')),
386 (r'diff.gz', ('debian_diff',)),
387 (r'tar.gz', ('native_tar_gz', 'native_tar')),
388 (r'debian\.tar\.(gz|bz2)', ('debian_tar',)),
389 (r'orig\.tar\.(gz|bz2)', ('orig_tar',)),
390 (r'tar\.(gz|bz2)', ('native_tar',)),
391 (r'orig-.+\.tar\.(gz|bz2)', ('more_orig_tar',)),
394 for f in dsc_files.keys():
395 m = re_issource.match(f)
397 rejmsg.append("%s: %s in Files field not recognised as source."
401 # Populate 'has' dictionary by resolving keys in lookup table
403 for regex, keys in ftype_lookup:
404 if re.match(regex, m.group(3)):
410 # File does not match anything in lookup table; reject
# NOTE(review): every other failure path appends to rejmsg; this bare
# reject() call looks inconsistent — confirm it is defined in the
# elided context, otherwise this branch raises NameError.
412 reject("%s: unexpected source file '%s'" % (dsc_filename, f))
414 # Check for multiple files
415 for file_type in ('orig_tar', 'native_tar', 'debian_tar', 'debian_diff'):
416 if has[file_type] > 1:
417 rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))
419 # Source format specific tests
421 format = get_format_from_string(dsc['format'])
423 '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)
426 except UnknownFormatError:
427 # Not an error here for now
432 ################################################################################
434 def check_hash_fields(what, manifest):
436 check_hash_fields ensures that there are no checksum fields in the
437 given dict that we do not know about.
# The accepted hash names come from the module-level known_hashes table.
441 hashes = map(lambda x: x[0], known_hashes)
442 for field in manifest:
443 if field.startswith("checksums-"):
444 hashname = field.split("-",1)[1]
445 if hashname not in hashes:
446 rejmsg.append("Unsupported checksum field for %s "\
447 "in %s" % (hashname, what))
450 ################################################################################
# Ensure the .changes carries the named hash: for changes format >= `version`
# the Checksums field must be present and is imported via parse_checksums();
# for older formats the hash is computed from the files on disk instead.
452 def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
453 if format >= version:
454 # The version should contain the specified hash.
457 # Import hashes from the changes
458 rejmsg = parse_checksums(".changes", files, changes, hashname)
462 # We need to calculate the hash because it can't possibly
465 return func(".changes", files, hashname, hashfunc)
467 # We could add the orig which might be in the pool to the files dict to
468 # access the checksums easily.
470 def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
472 ensure_dsc_hashes' task is to ensure that each and every *present* hash
473 in the dsc is correct, i.e. identical to the changes file and if necessary
474 the pool. The latter task is delegated to check_hash.
# A .dsc without the Checksums-<hash> field is not an error; the hash is
# simply not checked (early-return framing elided here).
478 if not dsc.has_key('Checksums-%s' % (hashname,)):
480 # Import hashes from the dsc
481 parse_checksums(".dsc", dsc_files, dsc, hashname)
483 rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
486 ################################################################################
# Parse the checksums-<hashname> field of `manifest` and store each file's
# checksum into the corresponding `files` entry; returns rejection messages
# for malformed lines, size mismatches and missing entries.
488 def parse_checksums(where, files, manifest, hashname):
490 field = 'checksums-%s' % hashname
491 if not field in manifest:
493 for line in manifest[field].split('\n'):
496 clist = line.strip().split(' ')
498 checksum, size, checkfile = clist
500 rejmsg.append("Cannot parse checksum line [%s]" % (line))
502 if not files.has_key(checkfile):
503 # TODO: check for the file's entry in the original files dict, not
504 # the one modified by (auto)byhand and other weird stuff
505 # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
506 # (file, hashname, where))
# NOTE(review): `size` is the string token from the manifest line; this
# comparison relies on files[checkfile]["size"] also being a string —
# confirm against build_file_list(), which stores the split token.
508 if not files[checkfile]["size"] == size:
509 rejmsg.append("%s: size differs for files and checksums-%s entry "\
510 "in %s" % (checkfile, hashname, where))
512 files[checkfile][hash_key(hashname)] = checksum
513 for f in files.keys():
514 if not files[f].has_key(hash_key(hashname)):
# NOTE(review): this message interpolates `checkfile` (stale value from
# the previous loop) instead of `f` — the reported filename will be
# wrong. Looks like a bug; fix to `f` once the elided context confirms.
515 rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
519 ################################################################################
521 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
# Parse the "files" (or a checksums-*) field of a parsed changes/dsc dict
# into a per-filename dict of size/section/priority/component plus the
# given hash value.
523 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
526 # Make sure we have a Files: field to parse...
527 if not changes.has_key(field):
528 raise NoFilesFieldError
530 # Get SourceFormat object for this Format and validate it
531 format = get_format_from_string(changes['format'])
532 format.validate_format(is_a_dsc=is_a_dsc, field=field)
# .dsc file lists carry no section/priority columns.
534 includes_section = (not is_a_dsc) and field == "files"
536 # Parse each entry/line:
537 for i in changes[field].split('\n'):
541 section = priority = ""
544 (md5, size, section, priority, name) = s
546 (md5, size, name) = s
548 raise ParseChangesError, i
555 (section, component) = extract_component_from_section(section)
557 files[name] = Dict(size=size, section=section,
558 priority=priority, component=component)
559 files[name][hashname] = md5
563 ################################################################################
565 def send_mail (message, filename=""):
566 """sendmail wrapper, takes _either_ a message string or a file as arguments"""
568 # If we've been passed a string dump it into a temporary file
570 (fd, filename) = tempfile.mkstemp()
571 os.write (fd, message)
# Optional recipient filtering: strip To/Bcc/Cc addresses not matching
# the Dinstall::MailWhiteList patterns before handing off to sendmail.
574 if Cnf.has_key("Dinstall::MailWhiteList") and \
575 Cnf["Dinstall::MailWhiteList"] != "":
576 message_in = open_file(filename)
577 message_raw = modemail.message_from_file(message_in)
581 whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
583 for line in whitelist_in:
584 if not re_whitespace_comment.match(line):
# Lines flagged by re_re_mark are compiled as regexes; all other
# lines are matched literally (re.escape).
585 if re_re_mark.match(line):
586 whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
588 whitelist.append(re.compile(re.escape(line.strip())))
593 fields = ["To", "Bcc", "Cc"]
596 value = message_raw.get(field, None)
599 for item in value.split(","):
600 (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
606 if not mail_whitelisted:
607 print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])
611 # Doesn't have any mail in whitelist so remove the header
613 del message_raw[field]
615 message_raw.replace_header(field, string.join(match, ", "))
617 # Change message fields in order if we don't have a To header
618 if not message_raw.has_key("To"):
621 if message_raw.has_key(field):
622 message_raw[fields[-1]] = message_raw[field]
623 del message_raw[field]
626 # Clean up any temporary files
627 # and return, as we removed all recipients.
629 os.unlink (filename);
# NOTE(review): O_EXCL without O_CREAT has no effect; confirm the intended
# flags when rewriting the filtered message back into `filename`.
632 fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
633 os.write (fd, message_raw.as_string(True));
# Pipe the (possibly rewritten) file into sendmail; non-zero exit raises.
637 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
639 raise SendmailFailedError, output
641 # Clean up any temporary files
641 # Clean up any temporary files
645 ################################################################################
# Return the pool subdirectory for a source package: "lib<x>/" prefix for
# library packages, the first letter otherwise. The component separator
# handling is elided in this excerpt.
647 def poolify (source, component):
650 if source[:3] == "lib":
651 return component + source[:4] + '/' + source + '/'
653 return component + source[:1] + '/' + source + '/'
655 ################################################################################
# Move `src` to `dest` (a file path or an existing directory), creating the
# destination directory (mode 02775) when missing. Refuses to clobber an
# existing file unless `overwrite` is set; the result gets mode `perms`.
657 def move (src, dest, overwrite = 0, perms = 0664):
658 if os.path.exists(dest) and os.path.isdir(dest):
661 dest_dir = os.path.dirname(dest)
662 if not os.path.exists(dest_dir):
663 umask = os.umask(00000)
664 os.makedirs(dest_dir, 02775)
666 #print "Moving %s to %s..." % (src, dest)
667 if os.path.exists(dest) and os.path.isdir(dest):
668 dest += '/' + os.path.basename(src)
669 # Don't overwrite unless forced to
670 if os.path.exists(dest):
672 fubar("Can't move %s to %s - file already exists." % (src, dest))
674 if not os.access(dest, os.W_OK):
675 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
# copy2 preserves timestamps; the removal of `src` is elided in this view.
676 shutil.copy2(src, dest)
677 os.chmod(dest, perms)
# Copy `src` to `dest`; same directory-creation and no-clobber logic as
# move() above, but failures raise exceptions (FileExistsError /
# CantOverwriteError) instead of calling fubar().
680 def copy (src, dest, overwrite = 0, perms = 0664):
681 if os.path.exists(dest) and os.path.isdir(dest):
684 dest_dir = os.path.dirname(dest)
685 if not os.path.exists(dest_dir):
686 umask = os.umask(00000)
687 os.makedirs(dest_dir, 02775)
689 #print "Copying %s to %s..." % (src, dest)
690 if os.path.exists(dest) and os.path.isdir(dest):
691 dest += '/' + os.path.basename(src)
692 # Don't overwrite unless forced to
693 if os.path.exists(dest):
695 raise FileExistsError
697 if not os.access(dest, os.W_OK):
698 raise CantOverwriteError
699 shutil.copy2(src, dest)
700 os.chmod(dest, perms)
702 ################################################################################
# (body of a host-to-database-name lookup helper; its def line is elided)
# Resolves the local hostname and returns the per-host
# Config::<host>::DatabaseHostname value when configured.
705 res = socket.gethostbyaddr(socket.gethostname())
706 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
707 if database_hostname:
708 return database_hostname
def which_conf_file ():
    """
    Return the path of the dak configuration file to use.

    Order of precedence:
      1. the DAK_CONFIG environment variable,
      2. a per-user config ($HOME/etc/dak.conf) read in-place when the
         host config allows local configs,
      3. the per-host Config::<host>::DakConfig path,
      4. the compiled-in default_config.
    """
    if os.getenv("DAK_CONFIG"):
        print(os.getenv("DAK_CONFIG"))
        return os.getenv("DAK_CONFIG")

    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # The second component must be relative: os.path.join() discards
        # everything before an absolute component, so "/etc/dak.conf"
        # would silently ignore $HOME and point at the system-wide file.
        confpath = os.path.join(homedir, "etc/dak.conf")
        if os.path.exists(confpath):
            # Read the *local* config file; previously this re-read
            # default_config, which made the whole branch a no-op.
            apt_pkg.ReadConfigFileISC(Cnf, confpath)

    # We are still in here, so there is no local config file or we do
    # not allow local files. Do the normal stuff.
    if Cnf.get("Config::" + res[0] + "::DakConfig"):
        return Cnf["Config::" + res[0] + "::DakConfig"]

    return default_config
def which_apt_conf_file ():
    """
    Return the path of the apt configuration file to use: the per-host
    Config::<host>::AptConfig if set, otherwise default_apt_config.
    A per-user config ($HOME/etc/dak.conf) is read first when the host
    config allows local configs.
    """
    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # Relative second component: os.path.join() would discard $HOME
        # if handed the absolute "/etc/dak.conf".
        confpath = os.path.join(homedir, "etc/dak.conf")
        if os.path.exists(confpath):
            # Read the local config itself, not default_config (the old
            # code re-read default_config here, a no-op).
            apt_pkg.ReadConfigFileISC(Cnf, confpath)

    if Cnf.get("Config::" + res[0] + "::AptConfig"):
        return Cnf["Config::" + res[0] + "::AptConfig"]

    return default_apt_config
# Return the per-host forward-alias file path if it exists (the return of
# the path — and the None fallback — are elided in this excerpt).
746 def which_alias_file():
747 hostname = socket.gethostbyaddr(socket.gethostname())[0]
748 aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
749 if os.path.exists(aliasfn):
754 ################################################################################
756 def TemplateSubst(map, filename):
757 """ Perform a substition of template """
758 templatefile = open_file(filename)
759 template = templatefile.read()
# Plain string replacement: each key of `map` found in the template text
# is replaced with str() of its value (loop header elided here).
761 template = template.replace(x, str(map[x]))
765 ################################################################################
# Print a fatal error to stderr; the sys.exit(exit_code) call is elided in
# this excerpt.
767 def fubar(msg, exit_code=1):
768 sys.stderr.write("E: %s\n" % (msg))
# (body of warn(msg): non-fatal counterpart with "W:" prefix; def elided)
772 sys.stderr.write("W: %s\n" % (msg))
774 ################################################################################
776 # Returns the user name with a laughable attempt at rfc822 conformancy
777 # (read: removing stray periods).
# (def line elided) GECOS full name of the invoking user, comma-split and
# with periods stripped.
779 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
# (def line elided) Companion helper: the login name of the invoking user.
782 return pwd.getpwuid(os.getuid())[0]
784 ################################################################################
794 return ("%d%s" % (c, t))
796 ################################################################################
# Normalise the architecture field of a parsed changes dict from a string
# into a {arch_name: 1} membership dict for easy lookups.
798 def cc_fix_changes (changes):
799 o = changes.get("architecture", "")
801 del changes["architecture"]
802 changes["architecture"] = {}
804 changes["architecture"][j] = 1
806 def changes_compare (a, b):
807 """ Sort by source name, source version, 'have source', and then by filename """
# Both .changes files are parsed (the error handling around the parse
# calls is elided in this excerpt) and their architecture fields
# normalised by cc_fix_changes() before the comparison cascade below.
809 a_changes = parse_changes(a)
814 b_changes = parse_changes(b)
818 cc_fix_changes (a_changes)
819 cc_fix_changes (b_changes)
821 # Sort by source name
822 a_source = a_changes.get("source")
823 b_source = b_changes.get("source")
824 q = cmp (a_source, b_source)
828 # Sort by source version
829 a_version = a_changes.get("version", "0")
830 b_version = b_changes.get("version", "0")
831 q = apt_pkg.VersionCompare(a_version, b_version)
835 # Sort by 'have source'
836 a_has_source = a_changes["architecture"].get("source")
837 b_has_source = b_changes["architecture"].get("source")
838 if a_has_source and not b_has_source:
840 elif b_has_source and not a_has_source:
843 # Fall back to sort by filename
846 ################################################################################
# Find a free filename by appending ".1", ".2", ... to `dest`; raises
# NoFreeFilenameError once `too_many` attempts have been exhausted.
848 def find_next_free (dest, too_many=100):
851 while os.path.exists(dest) and extra < too_many:
852 dest = orig_dest + '.' + repr(extra)
854 if extra >= too_many:
855 raise NoFreeFilenameError
858 ################################################################################
# Join a result row with `sep`, mapping None entries to the empty string.
860 def result_join (original, sep = '\t'):
862 for i in xrange(len(original)):
# NOTE(review): PEP 8 prefers "is None" over "== None" here.
863 if original[i] == None:
864 resultlist.append("")
866 resultlist.append(original[i])
867 return sep.join(resultlist)
869 ################################################################################
# Prefix every line of `str` with `prefix`; blank lines are dropped unless
# include_blank_lines is set.
871 def prefix_multi_line_string(str, prefix, include_blank_lines=0):
873 for line in str.split('\n'):
875 if line or include_blank_lines:
876 out += "%s%s\n" % (prefix, line)
877 # Strip trailing new line
882 ################################################################################
884 def validate_changes_file_arg(filename, require_changes=1):
886 'filename' is either a .changes or .dak file. If 'filename' is a
887 .dak file, it's changed to be the corresponding .changes file. The
888 function then checks if the .changes file a) exists and b) is
889 readable and returns the .changes filename if so. If there's a
890 problem, the next action depends on the option 'require_changes'
893 - If 'require_changes' == -1, errors are ignored and the .changes
894 filename is returned.
895 - If 'require_changes' == 0, a warning is given and 'None' is returned.
896 - If 'require_changes' == 1, a fatal error is raised.
901 orig_filename = filename
902 if filename.endswith(".dak"):
903 filename = filename[:-4]+".changes"
905 if not filename.endswith(".changes"):
906 error = "invalid file type; not a changes file"
908 if not os.access(filename,os.R_OK):
909 if os.path.exists(filename):
910 error = "permission denied"
912 error = "file not found"
# Dispatch on require_changes exactly as documented in the docstring.
915 if require_changes == 1:
916 fubar("%s: %s." % (orig_filename, error))
917 elif require_changes == 0:
918 warn("Skipping %s - %s" % (orig_filename, error))
920 else: # We only care about the .dak file
925 ################################################################################
928 return (arch != "source" and arch != "all")
930 ################################################################################
def join_with_commas_and(list):
    """Render a sequence as English prose: "a, b and c" ("nothing" when empty)."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    head, tail = list[:-1], list[-1]
    return "%s and %s" % (", ".join(head), tail)
937 ################################################################################
# (body of a dependency pretty-printer; its def line is elided) Each
# (pkg, version, constraint) atom is formatted as "pkg (constraint version)"
# and the alternatives are joined with " |".
942 (pkg, version, constraint) = atom
944 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
947 pp_deps.append(pp_dep)
948 return " |".join(pp_deps)
950 ################################################################################
955 ################################################################################
957 def parse_args(Options):
958 """ Handle -a, -c and -s arguments; returns them as SQL constraints """
959 # XXX: This should go away and everything which calls it be converted
960 # to use SQLA properly. For now, we'll just fix it not to use
961 # the old Pg interface though
962 session = DBConn().session()
# Process suite
966 for suitename in split_args(Options["Suite"]):
967 suite = get_suite(suitename, session=session)
# NOTE(review): get_suite() can return None for an unknown suite (the
# component branch below checks `component is None`); dereferencing
# suite.suite_id here would then raise AttributeError — confirm.
968 if suite.suite_id is None:
969 warn("suite '%s' not recognised." % (suite.suite_name))
971 suite_ids_list.append(suite.suite_id)
973 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
975 fubar("No valid suite given.")
# Process component
980 if Options["Component"]:
981 component_ids_list = []
982 for componentname in split_args(Options["Component"]):
983 component = get_component(componentname, session=session)
984 if component is None:
985 warn("component '%s' not recognised." % (componentname))
987 component_ids_list.append(component.component_id)
988 if component_ids_list:
989 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
991 fubar("No valid component given.")
995 # Process architecture
996 con_architectures = ""
998 if Options["Architecture"]:
1000 for archname in split_args(Options["Architecture"]):
# "source" is signalled via the check_source flag, not an arch id.
1001 if archname == "source":
1004 arch = get_architecture(archname, session=session)
1006 warn("architecture '%s' not recognised." % (archname))
1008 arch_ids_list.append(arch.arch_id)
1010 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
1012 if not check_source:
1013 fubar("No valid architecture given.")
1017 return (con_suites, con_architectures, con_components, check_source)
1019 ################################################################################
1021 # Inspired(tm) by Bryn Keller's print_exc_plus (See
1022 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
# (body of an extended traceback printer; def line elided) Walks the
# traceback to the innermost frame, prints the standard traceback, then
# dumps each frame's locals, guarding against values whose str()/repr()
# itself raises.
1025 tb = sys.exc_info()[2]
1032 frame = frame.f_back
1034 traceback.print_exc()
1036 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
1037 frame.f_code.co_filename,
1039 for key, value in frame.f_locals.items():
1040 print "\t%20s = " % key,
1044 print "<unable to print>"
1046 ################################################################################
1048 def try_with_debug(function):
1056 ################################################################################
# cmp()-style comparator; the branches handling a single "source" operand
# and the plain-string fallback are elided in this excerpt.
1058 def arch_compare_sw (a, b):
1060 Function for use in sorting lists of architectures.
1062 Sorts normally except that 'source' dominates all others.
1065 if a == "source" and b == "source":
1074 ################################################################################
1076 def split_args (s, dwim=1):
1078 Split command line arguments which can be separated by either commas
1079 or whitespace. If dwim is set, it will complain about string ending
1080 in comma since this usually means someone did 'dak ls -a i386, m68k
1081 foo' or something and the inevitable confusion resulting from 'm68k'
1082 being treated as an argument is undesirable.
# No comma at all → whitespace-separated (the split call is elided here).
1085 if s.find(",") == -1:
# A trailing comma is almost always a typo; fail fast rather than
# silently producing an empty trailing argument.
1088 if s[-1:] == "," and dwim:
1089 fubar("split_args: found trailing comma, spurious space maybe?")
1092 ################################################################################
def Dict(**kwargs):
    """Build a dict from keyword arguments: Dict(a=1) == {'a': 1}."""
    return kwargs
1096 ########################################
1098 def gpgv_get_status_output(cmd, status_read, status_write):
1100 Our very own version of commands.getouputstatus(), hacked to support
# Classic fork/exec plumbing: three pipes (stdin, stdout, stderr) plus the
# caller-supplied gpgv --status-fd pipe, which must survive the child's
# fd cleanup loop below.
1104 cmd = ['/bin/sh', '-c', cmd]
1105 p2cread, p2cwrite = os.pipe()
1106 c2pread, c2pwrite = os.pipe()
1107 errout, errin = os.pipe()
# Child side: close every fd above stderr except the status pipe, then
# exec the shell.
1117 for i in range(3, 256):
1118 if i != status_write:
1124 os.execvp(cmd[0], cmd)
# Parent side: multiplex child stdout/stderr and the status fd via
# select() until all streams are drained.
1130 os.dup2(c2pread, c2pwrite)
1131 os.dup2(errout, errin)
1133 output = status = ""
1135 i, o, e = select.select([c2pwrite, errin, status_read], [], [])
# NOTE(review): 8196 is probably a typo for 8192; harmless either way.
1138 r = os.read(fd, 8196)
1140 more_data.append(fd)
1141 if fd == c2pwrite or fd == errin:
1143 elif fd == status_read:
1146 fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
1148 pid, exit_status = os.waitpid(pid, 0)
1150 os.close(status_write)
1151 os.close(status_read)
1161 return output, status, exit_status
1163 ################################################################################
# Parse gpgv --status-fd output into a {keyword: args} dict; malformed
# lines are accumulated into internal_error, returned alongside.
1165 def process_gpgv_output(status):
1166 # Process the status-fd output
1169 for line in status.split('\n'):
1173 split = line.split()
1175 internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
1177 (gnupg, keyword) = split[:2]
1178 if gnupg != "[GNUPG:]":
1179 internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
# A few keywords may legitimately appear more than once; any other
# duplicate is treated as an internal error.
1182 if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
1183 internal_error += "found duplicate status token ('%s').\n" % (keyword)
1186 keywords[keyword] = args
1188 return (keywords, internal_error)
1190 ################################################################################
1192 def retrieve_key (filename, keyserver=None, keyring=None):
1194 Retrieve the key that signed 'filename' from 'keyserver' and
1195 add it to 'keyring'. Returns nothing on success, or an error message
1199 # Defaults for keyserver and keyring
1201 keyserver = Cnf["Dinstall::KeyServer"]
1203 keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
1205 # Ensure the filename contains no shell meta-characters or other badness
# This taint check is what makes the string-built shell commands below
# safe to run; do not remove it.
1206 if not re_taint_free.match(filename):
1207 return "%s: tainted filename" % (filename)
1209 # Invoke gpgv on the file
1210 status_read, status_write = os.pipe()
1211 cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1212 (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1214 # Process the status-fd output
1215 (keywords, internal_error) = process_gpgv_output(status)
1217 return internal_error
# Running against an empty keyring must yield NO_PUBKEY; its argument
# is the fingerprint of the key we need to fetch.
1219 if not keywords.has_key("NO_PUBKEY"):
1220 return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1222 fingerprint = keywords["NO_PUBKEY"][0]
1223 # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
1224 # it'll try to create a lockfile in /dev. A better solution might
1225 # be a tempfile or something.
1226 cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1227 % (Cnf["Dinstall::SigningKeyring"])
1228 cmd += " --keyring %s --keyserver %s --recv-key %s" \
1229 % (keyring, keyserver, fingerprint)
1230 (result, output) = commands.getstatusoutput(cmd)
1232 return "'%s' failed with exit code %s" % (cmd, result)
1236 ################################################################################
# Build the "--keyring X" argument string for gpg/gpgv invocations;
# defaults to the configured Dinstall::GPGKeyring list when `keyrings`
# is not supplied (the None check is elided in this excerpt).
1238 def gpg_keyring_args(keyrings=None):
1240 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1242 return " ".join(["--keyring %s" % x for x in keyrings])
1244 ################################################################################
def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None):
    """
    Check the signature of a file and return the fingerprint if the
    signature is valid or 'None' if it's not.  The first argument is the
    filename whose signature should be checked.  The second argument is
    optional and is the name of the file the detached signature applies
    to.  The third argument is optional and is a *list* of keyrings to
    use.  'autofetch' can either be None, True or False.  If None, the
    default behaviour specified in the config will be used.

    @rtype: tuple
    @return: (fingerprint, rejects) -- 'fingerprint' is the fingerprint of
             the signing key (None on failure) and 'rejects' is a list of
             human-readable reasons the check failed (empty on success).
    """

    rejects = []

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(sig_filename):
        rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
        return (None, rejects)

    if data_filename and not re_taint_free.match(data_filename):
        rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
        return (None, rejects)

    if not keyrings:
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    # Autofetch the signing key if that's enabled
    if autofetch is None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    if autofetch:
        error_msg = retrieve_key(sig_filename)
        if error_msg:
            rejects.append(error_msg)
            return (None, rejects)

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    if internal_error:
        rejects.append("internal error while performing signature check on %s." % (sig_filename))
        # BUGFIX: list.append() takes exactly one argument; the old
        # two-argument calls (left over from the reject(msg, prefix)
        # callback API) raised TypeError at runtime.
        rejects.append(internal_error)
        rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.")
        return (None, rejects)

    # Now check for obviously bad things in the processed output
    if keywords.has_key("KEYREVOKED"):
        rejects.append("The key used to sign %s has been revoked." % (sig_filename))
    if keywords.has_key("BADSIG"):
        rejects.append("bad signature on %s." % (sig_filename))
    if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
        rejects.append("failed to check signature on %s." % (sig_filename))
    if keywords.has_key("NO_PUBKEY"):
        args = keywords["NO_PUBKEY"]
        # Robustness: default the key id so a status line without
        # arguments cannot leave 'key' unbound.
        key = "UNKNOWN"
        if len(args) >= 1:
            key = args[0]
        rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if keywords.has_key("BADARMOR"):
        rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if keywords.has_key("NODATA"):
        rejects.append("no signature found in %s." % (sig_filename))
    if keywords.has_key("EXPKEYSIG"):
        args = keywords["EXPKEYSIG"]
        key = "UNKNOWN"
        if len(args) >= 1:
            key = args[0]
        rejects.append("Signature made by expired key 0x%s" % (key))
    if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
        args = keywords["KEYEXPIRED"]
        expiredate = ""
        if len(args) >= 1:
            timestamp = args[0]
            if timestamp.count("T") == 0:
                # Numeric epoch timestamp; render it as a date.
                try:
                    expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
                except ValueError:
                    expiredate = "unknown (%s)" % (timestamp)
            else:
                # Already an ISO-8601 style timestamp; use as-is.
                expiredate = timestamp
        rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))

    if len(rejects) > 0:
        return (None, rejects)

    # Next check gpgv exited with a zero return code
    if exit_status:
        rejects.append("gpgv failed while checking %s." % (sig_filename))
        if status:
            rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "))
        else:
            rejects.append(prefix_multi_line_string(output, " [GPG output:] "))
        return (None, rejects)

    # Sanity check the good stuff we expect
    fingerprint = None
    if not keywords.has_key("VALIDSIG"):
        rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
    else:
        args = keywords["VALIDSIG"]
        if len(args) < 1:
            rejects.append("internal error while checking signature on %s." % (sig_filename))
        else:
            fingerprint = args[0]
    if not keywords.has_key("GOODSIG"):
        rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if not keywords.has_key("SIG_ID"):
        rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

    # Finally ensure there's not something we don't recognise
    known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
                          NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")

    for keyword in keywords.keys():
        if not known_keywords.has_key(keyword):
            rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))

    if len(rejects) > 0:
        return (None, rejects)

    return (fingerprint, [])
1378 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """Retrieve email addresses from gpg key uids for a given fingerprint."""
    # Serve repeated lookups from the module-level cache.
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses is not None:
        return addresses
    addresses = set()
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
          % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    if result == 0:
        for l in output.split('\n'):
            m = re_gpg_uid.match(l)
            if m:
                addresses.add(m.group(1))
    # Cache even an empty result so a missing key is not re-queried.
    key_uid_email_cache[fingerprint] = addresses
    return addresses
1397 ################################################################################
# Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """
    Word-wrap 'paragraph' so no line exceeds 'max_length' characters,
    prefixing every continuation line with 'prefix'.

    Words longer than 'max_length' are placed on a line of their own
    rather than being broken up.

    @rtype: string
    @return: the wrapped text
    """
    s = ""
    line = ""
    have_started = False

    for word in paragraph.split():
        word_size = len(word)
        if word_size > max_length:
            # Oversized word: flush any pending line, then give the word
            # its own line.
            if have_started:
                s += line + '\n' + prefix
            s += word + '\n' + prefix
            line = ""
        else:
            if have_started:
                new_length = len(line) + word_size + 1
                if new_length > max_length:
                    # Word doesn't fit: flush the line and start a new one.
                    s += line + '\n' + prefix
                    line = word
                else:
                    line += ' ' + word
            else:
                line = word
        have_started = True

    if have_started:
        s += line

    return s
1430 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.

    @rtype: string
    @return: the fixed (relative) 'src'
    """
    # NOTE(review): str.replace strips the first occurrence of 'root'
    # anywhere in the path, not strictly a leading prefix -- assumed fine
    # for the archive pool layout this is called with; confirm at callers.
    src = src.replace(root, '', 1)
    dest = dest.replace(root, '', 1)
    dest = os.path.dirname(dest)
    # One '../' per directory component of the (relativized) destination.
    new_src = '../' * len(dest.split('/'))
    return new_src + src
1443 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.

    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    @rtype: tuple
    @return: a pair (fd, name) as returned by tempfile.mkstemp
    """
    # Note tempfile's argument order: (suffix, prefix, dir).
    return tempfile.mkstemp(suffix, prefix, directory)
1457 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Return a secure and unique directory by pre-creating it.

    If 'parent' is non-null, it will be the directory the directory is pre-created in.
    If 'prefix' is non-null, the dirname will be prefixed with it, default is dak.
    If 'suffix' is non-null, the dirname will end with it.

    @rtype: string
    @return: a pathname to the new directory
    """
    # Note tempfile's argument order: (suffix, prefix, dir).
    return tempfile.mkdtemp(suffix, prefix, parent)
1471 ################################################################################
def is_email_alias(email):
    """Check whether the user part of the email is listed in the alias file."""
    global alias_cache
    if alias_cache is None:
        # First call: build the cache from the alias file (if any).
        aliasfn = which_alias_file()
        alias_cache = set()
        if aliasfn:
            # BUGFIX: close the alias file instead of leaking the handle.
            f = open(aliasfn)
            try:
                for l in f:
                    # Alias file lines look like 'name: target'.
                    alias_cache.add(l.split(':')[0])
            finally:
                f.close()
    uid = email.split('@')[0]
    return uid in alias_cache
1485 ################################################################################
def get_changes_files(dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place).

    @rtype: list
    @return: list of .changes filenames found in 'dir'
    """
    try:
        # Much of the rest of p-u/p-a depends on being in the right place
        os.chdir(dir)
        changes_files = [x for x in os.listdir(dir) if x.endswith('.changes')]
    except OSError as e:
        # fubar() prints the error and exits the program.
        fubar("Failed to read list from directory %s (%s)" % (dir, e))

    return changes_files
1504 ################################################################################
# Module-level dak configuration, initialised at import time from the
# default config file (see default_config above).
Cnf = apt_pkg.newConfiguration()
apt_pkg.ReadConfigFileISC(Cnf, default_config)

#if which_conf_file() != default_config:
#    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1514 ###############################################################################
def ensure_orig_files(changes, dest_dir, session):
    """
    Ensure that dest_dir contains all the orig tarballs for the specified
    changes.  If it does not, symlink them into place.

    @rtype: tuple
    @return: 2-tuple (already_exists, symlinked) containing a list of files
             that were already there and a list of files that were symlinked
             into place.
    """

    exists, symlinked = [], []

    for dsc_file in changes.dsc_files:

        # Skip all files that are not orig tarballs
        if not re_is_orig_source.match(dsc_file):
            continue

        # Skip orig files not identified in the pool
        if not (dsc_file in changes.orig_files and
                'id' in changes.orig_files[dsc_file]):
            continue

        dest = os.path.join(dest_dir, dsc_file)

        # Already in place: record it and move on.
        if os.path.exists(dest):
            exists.append(dest)
            continue

        orig_file_id = changes.orig_files[dsc_file]['id']

        # Look up the pool location of the orig file.
        c = session.execute(
            'SELECT l.path, f.filename FROM location l, files f WHERE f.id = :id and f.location = l.id',
            {'id': orig_file_id}
        )

        res = c.fetchone()
        if not res:
            # NOTE(review): this error path returns a string while the
            # success path returns a tuple -- callers must check for this.
            return "[INTERNAL ERROR] Couldn't find id %s in files table." % orig_file_id

        src = os.path.join(res[0], res[1])
        os.symlink(src, dest)
        symlinked.append(dest)

    return (exists, symlinked)