# vim:set et ts=4 sw=4:

@contact: Debian FTP Master <ftpmaster@debian.org>
@copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
@license: GNU General Public License version 2 or later

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.

# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import email as modemail

from dbconn import DBConn, get_architecture, get_component, get_suite
from dak_exceptions import *
from textutils import fix_maintainer
from regexes import re_html_escaping, html_escaping, re_single_line_field, \
                    re_multi_line_field, re_srchasver, re_taint_free, \
                    re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource

from formats import parse_format, validate_changes_format
from srcformats import get_format_from_string
from collections import defaultdict
################################################################################

default_config = "/etc/dak/dak.conf"     #: default dak config, defines host properties
default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used

alias_cache = None        #: Cache for email alias checks
key_uid_email_cache = {}  #: Cache for email addresses from gpg key uids

# (hashname, function, earliest_changes_version)
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
                ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
# Monkeypatch commands.getstatusoutput as it may not return the correct exit
# code in lenny's Python. This also affects commands.getoutput and
# commands.getstatus.
def dak_getstatusoutput(cmd):
    pipe = subprocess.Popen(cmd, shell=True, universal_newlines=True,
                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

    output = "".join(pipe.stdout.readlines())

    if output[-1:] == '\n':
        output = output[:-1]

commands.getstatusoutput = dak_getstatusoutput
################################################################################

def html_escape(s):
    """ Escape html chars """
    return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)

################################################################################
def open_file(filename, mode='r'):
    """
    Open C{file}, return fileobject.

    @type filename: string
    @param filename: path/filename to open
    @param mode: open mode

    @return: open fileobject

    @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
    """
    try:
        f = open(filename, mode)
    except IOError:
        raise CantOpenError, filename
    return f

################################################################################
def our_raw_input(prompt=""):
    sys.stdout.write(prompt)
    sys.stderr.write("\nUser interrupt (^D).\n")

################################################################################

def extract_component_from_section(section):
    if section.find('/') != -1:
        component = section.split('/')[0]

    # Expand default component
    if Cnf.has_key("Component::%s" % section):

    return (section, component)

################################################################################
def parse_deb822(contents, signing_rules=0):
    # Split the lines in the input, keeping the linebreaks.
    lines = contents.splitlines(True)

    if len(lines) == 0:
        raise ParseChangesError, "[Empty changes file]"

    # Reindex by line number so we can easily verify the format of
    # any .dsc files...
    index = 0
    indexed_lines = {}
    for line in lines:
        index += 1
        indexed_lines[index] = line[:-1]

    num_of_lines = len(indexed_lines.keys())
    index = 0
    while index < num_of_lines:
        index += 1
        line = indexed_lines[index]
        if line == "":
            if signing_rules == 1:
                index += 1
                if index > num_of_lines:
                    raise InvalidDscError, index
                line = indexed_lines[index]
                if not line.startswith("-----BEGIN PGP SIGNATURE"):
                    raise InvalidDscError, index

        if line.startswith("-----BEGIN PGP SIGNATURE"):
            break
        if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
            if signing_rules == 1:
                while index < num_of_lines and line != "":
                    index += 1
                    line = indexed_lines[index]

        # If we're not inside the signed data, don't process anything
        if signing_rules >= 0 and not inside_signature:
            continue

        slf = re_single_line_field.match(line)
        if slf:
            field = slf.groups()[0].lower()
            changes[field] = slf.groups()[1]

        changes[field] += '\n'

        mlf = re_multi_line_field.match(line)
        if mlf:
            if first == -1:
                raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
            if first == 1 and changes[field] != "":
                changes[field] += '\n'
            changes[field] += mlf.groups()[0] + '\n'

    if signing_rules == 1 and inside_signature:
        raise InvalidDscError, index

    changes["filecontents"] = "".join(lines)

    if changes.has_key("source"):
        # Strip the source version in brackets from the source field,
        # put it in the "source-version" field instead.
        srcver = re_srchasver.search(changes["source"])
        if srcver:
            changes["source"] = srcver.group(1)
            changes["source-version"] = srcver.group(2)

    if error:
        raise ParseChangesError, error

    return changes

################################################################################
def parse_changes(filename, signing_rules=0):
    """
    Parses a changes file and returns a dictionary where each field is a
    key. The mandatory first argument is the filename of the .changes
    file.

    signing_rules is an optional argument:

      - If signing_rules == -1, no signature is required.
      - If signing_rules == 0 (the default), a signature is required.
      - If signing_rules == 1, it turns on the same strict format checking
        as dpkg-source.

    The rules for (signing_rules == 1)-mode are:

      - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
        followed by any PGP header data and must end with a blank line.

      - The data section must end with a blank line and must be followed by
        "-----BEGIN PGP SIGNATURE-----".
    """

    changes_in = open_file(filename)
    content = changes_in.read()
    try:
        unicode(content, 'utf-8')
    except UnicodeError:
        raise ChangesUnicodeError, "Changes file not proper utf-8"
    return parse_deb822(content, signing_rules)
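# Illustrative sketch (not from the original source; the filename is
# hypothetical): parsing an unsigned .changes file and reading a couple of
# the resulting (lower-cased) fields.
#
#   changes = parse_changes("hello_2.10-1_amd64.changes", signing_rules=-1)
#   print changes["source"], changes.get("source-version", "")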
################################################################################

def hash_key(hashname):
    return '%ssum' % hashname
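# Example: hash_key("sha256") -> "sha256sum", i.e. the per-file dict key under
# which the helpers below store and look up that checksum.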
################################################################################

def create_hash(where, files, hashname, hashfunc):
    """
    create_hash extends the passed files dict with the given hash by
    iterating over all files on disk and passing them to the hashing
    function.
    """
    rejmsg = []
    for f in files.keys():
        try:
            file_handle = open_file(f)
        except CantOpenError:
            rejmsg.append("Could not open file %s for checksumming" % (f))
            continue
        files[f][hash_key(hashname)] = hashfunc(file_handle)
    return rejmsg

################################################################################
def check_hash(where, files, hashname, hashfunc):
    """
    check_hash checks the given hash in the files dict against the actual
    files on disk. The hash values need to be present consistently in
    all file entries. It does not modify its input in any way.
    """
    rejmsg = []
    for f in files.keys():
        try:
            file_handle = open_file(f)

            # Check for the hash entry, to not trigger a KeyError.
            if not files[f].has_key(hash_key(hashname)):
                rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
                                                                where))

            # Actually check the hash for correctness.
            if hashfunc(file_handle) != files[f][hash_key(hashname)]:
                rejmsg.append("%s: %s check failed in %s" % (f, hashname,
                                                             where))
        except CantOpenError:
            # TODO: This happens when the file is in the pool.
            # warn("Cannot open file %s" % f)
            pass

    return rejmsg

################################################################################
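# The checksum and size helpers above and below all operate on the dict that
# build_file_list() produces: a mapping of filename to per-file attributes.
# A minimal sketch of that shape (all values illustrative only):
#
#   files = {
#       "hello_2.10-1.dsc": {"size": "1432",
#                            "md5sum": "d41d8cd98f00b204e9800998ecf8427e",
#                            "section": "devel", "priority": "optional",
#                            "component": "main"},
#   }
#   rejmsg = check_size(".changes", files)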
def check_size(where, files):
    """
    check_size checks the file sizes in the passed files dict against the
    files on disk.
    """
    rejmsg = []
    for f in files.keys():
        # TODO: This happens when the file is in the pool.
        actual_size = entry[stat.ST_SIZE]
        size = int(files[f]["size"])
        if size != actual_size:
            rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
                          % (f, actual_size, size, where))

    return rejmsg

################################################################################
def check_dsc_files(dsc_filename, dsc=None, dsc_files=None):
    """
    Verify that the files listed in the Files field of the .dsc are
    those expected given the announced Format.

    @type dsc_filename: string
    @param dsc_filename: path of .dsc file

    @param dsc: the content of the .dsc parsed by C{parse_changes()}

    @type dsc_files: dict
    @param dsc_files: the file list returned by C{build_file_list()}

    @return: all errors detected
    """
    rejmsg = []

    # Parse the file if needed
    if dsc is None:
        dsc = parse_changes(dsc_filename, signing_rules=1)

    if dsc_files is None:
        dsc_files = build_file_list(dsc, is_a_dsc=1)

    # Ensure .dsc lists proper set of source files according to the format
    has = defaultdict(lambda: 0)

    ftype_lookup = (
        (r'orig.tar.gz',               ('orig_tar_gz', 'orig_tar')),
        (r'diff.gz',                   ('debian_diff',)),
        (r'tar.gz',                    ('native_tar_gz', 'native_tar')),
        (r'debian\.tar\.(gz|bz2)',     ('debian_tar',)),
        (r'orig\.tar\.(gz|bz2)',       ('orig_tar',)),
        (r'tar\.(gz|bz2)',             ('native_tar',)),
        (r'orig-.+\.tar\.(gz|bz2)',    ('more_orig_tar',)),
    )

    for f in dsc_files.keys():
        m = re_issource.match(f)
        if not m:
            rejmsg.append("%s: %s in Files field not recognised as source."
                          % (dsc_filename, f))
            continue

        # Populate 'has' dictionary by resolving keys in lookup table
        for regex, keys in ftype_lookup:
            if re.match(regex, m.group(3)):
                for key in keys:
                    has[key] += 1
                break
        else:
            # File does not match anything in lookup table; reject
            rejmsg.append("%s: unexpected source file '%s'" % (dsc_filename, f))

    # Check for multiple files
    for file_type in ('orig_tar', 'native_tar', 'debian_tar', 'debian_diff'):
        if has[file_type] > 1:
            rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))

    # Source format specific tests
    try:
        format = get_format_from_string(dsc['format'])
        rejmsg.extend([
            '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)
        ])
    except UnknownFormatError:
        # Not an error here for now
        pass

    return rejmsg

################################################################################
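# Illustrative sketch (the filename is hypothetical): for a 1.0 non-native
# package the .dsc is expected to list exactly one orig tarball and one Debian
# diff, so a well-formed Files field yields no reject messages.
#
#   rejmsg = check_dsc_files("hello_2.10-1.dsc")
#   if rejmsg:
#       print "\n".join(rejmsg)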
def check_hash_fields(what, manifest):
    """
    check_hash_fields ensures that there are no checksum fields in the
    given dict that we do not know about.
    """
    rejmsg = []
    hashes = map(lambda x: x[0], known_hashes)
    for field in manifest:
        if field.startswith("checksums-"):
            hashname = field.split("-", 1)[1]
            if hashname not in hashes:
                rejmsg.append("Unsupported checksum field for %s "\
                              "in %s" % (hashname, what))
    return rejmsg

################################################################################
def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
    if format >= version:
        # The version should contain the specified hash.

        # Import hashes from the changes
        rejmsg = parse_checksums(".changes", files, changes, hashname)

    # We need to calculate the hash because it can't possibly
    # be in the file.
    return func(".changes", files, hashname, hashfunc)

# We could add the orig which might be in the pool to the files dict to
# access the checksums easily.

def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
    """
    ensure_dsc_hashes' task is to ensure that each and every *present* hash
    in the dsc is correct, i.e. identical to the changes file and if necessary
    the pool. The latter task is delegated to check_hash.
    """
    rejmsg = []
    if not dsc.has_key('Checksums-%s' % (hashname,)):
        return rejmsg
    # Import hashes from the dsc
    parse_checksums(".dsc", dsc_files, dsc, hashname)
    rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
    return rejmsg

################################################################################
def parse_checksums(where, files, manifest, hashname):
    rejmsg = []
    field = 'checksums-%s' % hashname
    if not field in manifest:
        return rejmsg
    for line in manifest[field].split('\n'):
        clist = line.strip().split(' ')
        if len(clist) == 3:
            checksum, size, checkfile = clist
        else:
            rejmsg.append("Cannot parse checksum line [%s]" % (line))
            continue
        if not files.has_key(checkfile):
            # TODO: check for the file's entry in the original files dict, not
            # the one modified by (auto)byhand and other weird stuff
            # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
            #               (file, hashname, where))
            continue
        if not files[checkfile]["size"] == size:
            rejmsg.append("%s: size differs for files and checksums-%s entry "\
                          "in %s" % (checkfile, hashname, where))
        files[checkfile][hash_key(hashname)] = checksum
    for f in files.keys():
        if not files[f].has_key(hash_key(hashname)):
            rejmsg.append("%s: no entry in checksums-%s in %s" % (f,
                          hashname, where))
    return rejmsg

################################################################################
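# For reference, a Checksums-<hash> field handled above carries one
# "<checksum> <size> <filename>" triplet per line, e.g. (values illustrative):
#
#   Checksums-Sha256:
#    9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08 1432 hello_2.10-1.dsc
#
# parse_checksums(".dsc", dsc_files, dsc, "sha256") copies each checksum into
# dsc_files[<filename>]["sha256sum"] and flags size mismatches.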
# Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl

def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
    files = {}

    # Make sure we have a Files: field to parse...
    if not changes.has_key(field):
        raise NoFilesFieldError

    # Validate .changes Format: field
    validate_changes_format(parse_format(changes['format']), field)

    includes_section = (not is_a_dsc) and field == "files"

    # Parse each entry/line:
    for i in changes[field].split('\n'):
        s = i.split()
        section = priority = ""
        try:
            if includes_section:
                (md5, size, section, priority, name) = s
            else:
                (md5, size, name) = s
        except ValueError:
            raise ParseChangesError, i

        (section, component) = extract_component_from_section(section)

        files[name] = dict(size=size, section=section,
                           priority=priority, component=component)
        files[name][hashname] = md5

    return files

################################################################################
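# For reference, a .changes Files field parsed by build_file_list() looks like
# (one entry per line, values illustrative):
#
#   Files:
#    d41d8cd98f00b204e9800998ecf8427e 1432 devel optional hello_2.10-1.dsc
#
# which becomes files["hello_2.10-1.dsc"] with "size", "section", "priority",
# "component" and "md5sum" keys, the component being derived from the section
# by extract_component_from_section().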
def send_mail (message, filename=""):
    """sendmail wrapper, takes _either_ a message string or a file as arguments"""

    # Check whether we're supposed to be sending mail
    if Cnf.has_key("Dinstall::Options::No-Mail") and Cnf["Dinstall::Options::No-Mail"]:
        return

    # If we've been passed a string dump it into a temporary file
    if message:
        (fd, filename) = tempfile.mkstemp()
        os.write (fd, message)

    if Cnf.has_key("Dinstall::MailWhiteList") and \
       Cnf["Dinstall::MailWhiteList"] != "":
        message_in = open_file(filename)
        message_raw = modemail.message_from_file(message_in)

        whitelist = []
        whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
        for line in whitelist_in:
            if not re_whitespace_comment.match(line):
                if re_re_mark.match(line):
                    whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
                else:
                    whitelist.append(re.compile(re.escape(line.strip())))

        fields = ["To", "Bcc", "Cc"]
        for field in fields:
            value = message_raw.get(field, None)
            for item in value.split(","):
                (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
                if not mail_whitelisted:
                    print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])

            # Doesn't have any mail in whitelist so remove the header
            del message_raw[field]
            message_raw.replace_header(field, ', '.join(match))

        # Change message fields in order if we don't have a To header
        if not message_raw.has_key("To"):
            if message_raw.has_key(field):
                message_raw[fields[-1]] = message_raw[field]
                del message_raw[field]

            # Clean up any temporary files
            # and return, as we removed all recipients.
            os.unlink (filename)

        fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700)
        os.write (fd, message_raw.as_string(True))

    (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
    if result != 0:
        raise SendmailFailedError, output

    # Clean up any temporary files

################################################################################
def poolify (source, component):
    if source[:3] == "lib":
        return component + source[:4] + '/' + source + '/'
    return component + source[:1] + '/' + source + '/'
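# Illustrative examples of the pool layout produced above (the component
# string is assumed to already carry its trailing slash in this excerpt):
#
#   poolify("glibc", "main/")   -> "main/g/glibc/"
#   poolify("libfoo", "main/")  -> "main/libf/libfoo/"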
################################################################################

def move (src, dest, overwrite = 0, perms = 0664):
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = dest
    else:
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
        os.umask(umask)
    #print "Moving %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
        if not overwrite:
            fubar("Can't move %s to %s - file already exists." % (src, dest))
        if not os.access(dest, os.W_OK):
            fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
    shutil.copy2(src, dest)
    os.chmod(dest, perms)

def copy (src, dest, overwrite = 0, perms = 0664):
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = dest
    else:
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
        os.umask(umask)
    #print "Copying %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
        if not overwrite:
            raise FileExistsError
        if not os.access(dest, os.W_OK):
            raise CantOverwriteError
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
################################################################################

    res = socket.gethostbyaddr(socket.gethostname())
    database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
    if database_hostname:
        return database_hostname

def which_conf_file ():
    if os.getenv('DAK_CONFIG'):
        return os.getenv('DAK_CONFIG')

    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            apt_pkg.ReadConfigFileISC(Cnf, default_config)

    # We are still in here, so there is no local config file or we do
    # not allow local files. Do the normal stuff.
    if Cnf.get("Config::" + res[0] + "::DakConfig"):
        return Cnf["Config::" + res[0] + "::DakConfig"]

    return default_config
def which_apt_conf_file ():
    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            apt_pkg.ReadConfigFileISC(Cnf, default_config)

    if Cnf.get("Config::" + res[0] + "::AptConfig"):
        return Cnf["Config::" + res[0] + "::AptConfig"]

    return default_apt_config

def which_alias_file():
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    aliasfn = '/var/lib/misc/' + hostname + '/forward-alias'
    if os.path.exists(aliasfn):
        return aliasfn

################################################################################
def TemplateSubst(subst_map, filename):
    """ Perform a substitution of template """
    templatefile = open_file(filename)
    template = templatefile.read()
    for k, v in subst_map.iteritems():
        template = template.replace(k, str(v))
    templatefile.close()
    return template
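# Illustrative sketch (template path and keys are hypothetical): dak's text
# templates carry placeholder tokens that are replaced verbatim, e.g.
#
#   subst = {"__DAK_ADDRESS__": "ftpmaster@example.org", "__SOURCE__": "hello"}
#   body = TemplateSubst(subst, Cnf["Dir::Templates"] + "/some.template")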
################################################################################

def fubar(msg, exit_code=1):
    sys.stderr.write("E: %s\n" % (msg))
    sys.exit(exit_code)

def warn(msg):
    sys.stderr.write("W: %s\n" % (msg))

################################################################################

# Returns the user name with a laughable attempt at rfc822 conformancy
# (read: removing stray periods).
    return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')

    return pwd.getpwuid(os.getuid())[0]

################################################################################

    return ("%d%s" % (c, t))

################################################################################

def cc_fix_changes (changes):
    o = changes.get("architecture", "")
    if o:
        del changes["architecture"]
    changes["architecture"] = {}
    for j in o.split():
        changes["architecture"][j] = 1
def changes_compare (a, b):
    """ Sort by source name, source version, 'have source', and then by filename """
    a_changes = parse_changes(a)
    b_changes = parse_changes(b)

    cc_fix_changes (a_changes)
    cc_fix_changes (b_changes)

    # Sort by source name
    a_source = a_changes.get("source")
    b_source = b_changes.get("source")
    q = cmp (a_source, b_source)
    if q:
        return q

    # Sort by source version
    a_version = a_changes.get("version", "0")
    b_version = b_changes.get("version", "0")
    q = apt_pkg.VersionCompare(a_version, b_version)
    if q:
        return q

    # Sort by 'have source'
    a_has_source = a_changes["architecture"].get("source")
    b_has_source = b_changes["architecture"].get("source")
    if a_has_source and not b_has_source:
        return -1
    elif b_has_source and not a_has_source:
        return 1

    # Fall back to sort by filename
    return cmp(a, b)

################################################################################
def find_next_free (dest, too_many=100):
    extra = 0
    orig_dest = dest
    while os.path.exists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
        extra += 1
    if extra >= too_many:
        raise NoFreeFilenameError
    return dest

################################################################################

def result_join (original, sep = '\t'):
    resultlist = []
    for i in xrange(len(original)):
        if original[i] == None:
            resultlist.append("")
        else:
            resultlist.append(original[i])
    return sep.join(resultlist)
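# Example: result_join(["foo", None, "bar"]) -> "foo\t\tbar"; None entries are
# rendered as empty strings so the tab-separated columns stay aligned.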
################################################################################

def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    out = ""
    for line in str.split('\n'):
        if line or include_blank_lines:
            out += "%s%s\n" % (prefix, line)
    # Strip trailing new line
    if out:
        out = out[:-1]
    return out

################################################################################
def validate_changes_file_arg(filename, require_changes=1):
    """
    'filename' is either a .changes or .dak file. If 'filename' is a
    .dak file, it's changed to be the corresponding .changes file. The
    function then checks if the .changes file a) exists and b) is
    readable and returns the .changes filename if so. If there's a
    problem, the next action depends on the option 'require_changes':

      - If 'require_changes' == -1, errors are ignored and the .changes
        filename is returned.
      - If 'require_changes' == 0, a warning is given and 'None' is returned.
      - If 'require_changes' == 1, a fatal error is raised.
    """

    orig_filename = filename
    if filename.endswith(".dak"):
        filename = filename[:-4]+".changes"

    if not filename.endswith(".changes"):
        error = "invalid file type; not a changes file"
    if not os.access(filename,os.R_OK):
        if os.path.exists(filename):
            error = "permission denied"
        else:
            error = "file not found"

    if require_changes == 1:
        fubar("%s: %s." % (orig_filename, error))
    elif require_changes == 0:
        warn("Skipping %s - %s" % (orig_filename, error))
        return None
    else: # We only care about the .dak file
        return filename

################################################################################
    return (arch != "source" and arch != "all")

################################################################################

def join_with_commas_and(list):
    if len(list) == 0: return "nothing"
    if len(list) == 1: return list[0]
    return ", ".join(list[:-1]) + " and " + list[-1]

################################################################################

        (pkg, version, constraint) = atom
        pp_dep = "%s (%s %s)" % (pkg, constraint, version)
        pp_deps.append(pp_dep)
    return " |".join(pp_deps)

################################################################################

################################################################################
def parse_args(Options):
    """ Handle -a, -c and -s arguments; returns them as SQL constraints """
    # XXX: This should go away and everything which calls it be converted
    # to use SQLA properly. For now, we'll just fix it not to use
    # the old Pg interface though
    session = DBConn().session()

    # Process suite
    if Options["Suite"]:
        suite_ids_list = []
        for suitename in split_args(Options["Suite"]):
            suite = get_suite(suitename, session=session)
            if suite.suite_id is None:
                warn("suite '%s' not recognised." % (suite.suite_name))
            else:
                suite_ids_list.append(suite.suite_id)
        if suite_ids_list:
            con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
        else:
            fubar("No valid suite given.")
    else:
        con_suites = ""

    # Process component
    if Options["Component"]:
        component_ids_list = []
        for componentname in split_args(Options["Component"]):
            component = get_component(componentname, session=session)
            if component is None:
                warn("component '%s' not recognised." % (componentname))
            else:
                component_ids_list.append(component.component_id)
        if component_ids_list:
            con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
        else:
            fubar("No valid component given.")
    else:
        con_components = ""

    # Process architecture
    con_architectures = ""
    check_source = 0
    if Options["Architecture"]:
        arch_ids_list = []
        for archname in split_args(Options["Architecture"]):
            if archname == "source":
                check_source = 1
            else:
                arch = get_architecture(archname, session=session)
                if arch is None:
                    warn("architecture '%s' not recognised." % (archname))
                else:
                    arch_ids_list.append(arch.arch_id)
        if arch_ids_list:
            con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
        else:
            if not check_source:
                fubar("No valid architecture given.")

    return (con_suites, con_architectures, con_components, check_source)
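# Illustrative sketch (database ids are made up): with
#   Options = {"Suite": "unstable", "Component": "", "Architecture": "i386,amd64"}
# this returns something like
#   ("AND su.id IN (5)", "AND a.id IN (1, 2)", "", 0)
# ready to be spliced into a hand-written SQL query.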
################################################################################

# Inspired(tm) by Bryn Keller's print_exc_plus (See
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)

    tb = sys.exc_info()[2]
        frame = frame.f_back
    traceback.print_exc()
        print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
                                               frame.f_code.co_filename,
                                               frame.f_lineno)
        for key, value in frame.f_locals.items():
            print "\t%20s = " % key,
                print "<unable to print>"

################################################################################

def try_with_debug(function):

################################################################################

def arch_compare_sw (a, b):
    """
    Function for use in sorting lists of architectures.

    Sorts normally except that 'source' dominates all others.
    """

    if a == "source" and b == "source":
        return 0

################################################################################
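# Example for arch_compare_sw() above: sorting a list of architecture names
# with it puts "source" first and the rest in plain string order, e.g.
#   sorted(["i386", "source", "amd64"], cmp=arch_compare_sw)
#   -> ["source", "amd64", "i386"]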
def split_args (s, dwim=1):
    """
    Split command line arguments which can be separated by either commas
    or whitespace. If dwim is set, it will complain about a string ending
    in a comma since this usually means someone did 'dak ls -a i386, m68k
    foo' or something and the inevitable confusion resulting from 'm68k'
    being treated as an argument is undesirable.
    """
    if s.find(",") == -1:
        return s.split()
    else:
        if s[-1:] == "," and dwim:
            fubar("split_args: found trailing comma, spurious space maybe?")
        return s.split(",")
################################################################################

def gpgv_get_status_output(cmd, status_read, status_write):
    """
    Our very own version of commands.getstatusoutput(), hacked to support
    gpgv's status fd.
    """

    cmd = ['/bin/sh', '-c', cmd]
    p2cread, p2cwrite = os.pipe()
    c2pread, c2pwrite = os.pipe()
    errout, errin = os.pipe()

        for i in range(3, 256):
            if i != status_write:
        os.execvp(cmd[0], cmd)

    os.dup2(c2pread, c2pwrite)
    os.dup2(errout, errin)

    output = status = ""
        i, o, e = select.select([c2pwrite, errin, status_read], [], [])
            r = os.read(fd, 8196)
                more_data.append(fd)
                if fd == c2pwrite or fd == errin:
                elif fd == status_read:
                    fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))

    pid, exit_status = os.waitpid(pid, 0)

    os.close(status_write)
    os.close(status_read)

    return output, status, exit_status
################################################################################

def process_gpgv_output(status):
    # Process the status-fd output
    keywords = {}
    internal_error = ""
    for line in status.split('\n'):
        split = line.split()
        if len(split) < 2:
            internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
            continue
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
            continue
        args = split[2:]
        if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
            continue
        keywords[keyword] = args

    return (keywords, internal_error)
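# Illustrative status-fd input and result (key ids shortened/made up):
#
#   [GNUPG:] GOODSIG 1234567890ABCDEF Some Maintainer <sm@example.org>
#   [GNUPG:] VALIDSIG 0123456789ABCDEF0123456789ABCDEF01234567 ...
#
# yields keywords == {"GOODSIG": ["1234567890ABCDEF", ...], "VALIDSIG": [...]}
# and an empty internal_error string.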
################################################################################

def retrieve_key (filename, keyserver=None, keyring=None):
    """
    Retrieve the key that signed 'filename' from 'keyserver' and
    add it to 'keyring'. Returns nothing on success, or an error message
    on failure.
    """

    # Defaults for keyserver and keyring
    if not keyserver:
        keyserver = Cnf["Dinstall::KeyServer"]
    if not keyring:
        keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(filename):
        return "%s: tainted filename" % (filename)

    # Invoke gpgv on the file
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
    (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)
    if internal_error:
        return internal_error

    if not keywords.has_key("NO_PUBKEY"):
        return "didn't find expected NO_PUBKEY in gpgv status-fd output"

    fingerprint = keywords["NO_PUBKEY"][0]

    # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
    # it'll try to create a lockfile in /dev. A better solution might
    # be a tempfile or something.
    cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
          % (Cnf["Dinstall::SigningKeyring"])
    cmd += " --keyring %s --keyserver %s --recv-key %s" \
           % (keyring, keyserver, fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    if result != 0:
        return "'%s' failed with exit code %s" % (cmd, result)

################################################################################
def gpg_keyring_args(keyrings=None):
    if not keyrings:
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    return " ".join(["--keyring %s" % x for x in keyrings])
################################################################################

def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None):
    """
    Check the signature of a file and return the fingerprint if the
    signature is valid or 'None' if it's not. The first argument is the
    filename whose signature should be checked. The second argument is a
    reject function and is called when an error is found. The reject()
    function must allow for two arguments: the first is the error message,
    the second is an optional prefix string. It's possible for reject()
    to be called more than once during an invocation of check_signature().
    The third argument is optional and is the name of the files the
    detached signature applies to. The fourth argument is optional and is
    a *list* of keyrings to use. 'autofetch' can either be None, True or
    False. If None, the default behaviour specified in the config will be
    used.
    """

    rejects = []

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(sig_filename):
        rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
        return (None, rejects)

    if data_filename and not re_taint_free.match(data_filename):
        rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
        return (None, rejects)

    if not keyrings:
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    # Autofetch the signing key if that's enabled
    if autofetch == None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    if autofetch:
        error_msg = retrieve_key(sig_filename)
        if error_msg:
            rejects.append(error_msg)
            return (None, rejects)

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    if internal_error:
        rejects.append("internal error while performing signature check on %s." % (sig_filename))
        rejects.append(internal_error)
        rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.")
        return (None, rejects)
    # Now check for obviously bad things in the processed output
    if keywords.has_key("KEYREVOKED"):
        rejects.append("The key used to sign %s has been revoked." % (sig_filename))
    if keywords.has_key("BADSIG"):
        rejects.append("bad signature on %s." % (sig_filename))
    if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
        rejects.append("failed to check signature on %s." % (sig_filename))
    if keywords.has_key("NO_PUBKEY"):
        args = keywords["NO_PUBKEY"]
        key = args[0]
        rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if keywords.has_key("BADARMOR"):
        rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if keywords.has_key("NODATA"):
        rejects.append("no signature found in %s." % (sig_filename))
    if keywords.has_key("EXPKEYSIG"):
        args = keywords["EXPKEYSIG"]
        key = args[0]
        rejects.append("Signature made by expired key 0x%s" % (key))
    if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
        args = keywords["KEYEXPIRED"]
        timestamp = args[0]
        if timestamp.count("T") == 0:
            try:
                expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
            except ValueError:
                expiredate = "unknown (%s)" % (timestamp)
        else:
            expiredate = timestamp
        rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))

    if len(rejects) > 0:
        return (None, rejects)

    # Next check gpgv exited with a zero return code
    if exit_status:
        rejects.append("gpgv failed while checking %s." % (sig_filename))
        if status:
            rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "))
        else:
            rejects.append(prefix_multi_line_string(output, " [GPG output:] "))
        return (None, rejects)
    # Sanity check the good stuff we expect
    if not keywords.has_key("VALIDSIG"):
        rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
    else:
        args = keywords["VALIDSIG"]
        if len(args) < 1:
            rejects.append("internal error while checking signature on %s." % (sig_filename))
        else:
            fingerprint = args[0]
    if not keywords.has_key("GOODSIG"):
        rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if not keywords.has_key("SIG_ID"):
        rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

    # Finally ensure there's not something we don't recognise
    known_keywords = dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
                          NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="",POLICY_URL="")

    for keyword in keywords.keys():
        if not known_keywords.has_key(keyword):
            rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))

    if len(rejects) > 0:
        return (None, rejects)

    return (fingerprint, [])

################################################################################
################################################################################

def gpg_get_key_addresses(fingerprint):
    """retrieve email addresses from gpg key uids for a given fingerprint"""
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses != None:
        return addresses
    addresses = set()
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
          % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    if result == 0:
        for l in output.split('\n'):
            m = re_gpg_uid.match(l)
            if m:
                addresses.add(m.group(1))
    key_uid_email_cache[fingerprint] = addresses
    return addresses

################################################################################
# Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603

def wrap(paragraph, max_length, prefix=""):
    s = ""
    words = paragraph.split()

    for word in words:
        word_size = len(word)
        if word_size > max_length:
            s += line + '\n' + prefix
            s += word + '\n' + prefix
        new_length = len(line) + word_size + 1
        if new_length > max_length:
            s += line + '\n' + prefix

################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
    """
    src = src.replace(root, '', 1)
    dest = dest.replace(root, '', 1)
    dest = os.path.dirname(dest)
    new_src = '../' * len(dest.split('/'))
    return new_src + src
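# Example (paths illustrative):
#   clean_symlink("/srv/ftp/pool/main/h/hello/hello_2.10.orig.tar.gz",
#                 "/srv/ftp/dists/sid/main/source/hello_2.10.orig.tar.gz",
#                 "/srv/ftp/")
# returns "../../../../pool/main/h/hello/hello_2.10.orig.tar.gz".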
################################################################################

def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.
    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pair (fd, name).
    """

    return tempfile.mkstemp(suffix, prefix, directory)

################################################################################

def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Return a secure and unique directory by pre-creating it.
    If 'parent' is non-null, it will be the directory the directory is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pathname to the new directory
    """

    return tempfile.mkdtemp(suffix, prefix, parent)

################################################################################
def is_email_alias(email):
    """ checks if the user part of the email is listed in the alias file """
    global alias_cache
    if alias_cache == None:
        aliasfn = which_alias_file()
        alias_cache = set()
        if aliasfn:
            for l in open(aliasfn):
                alias_cache.add(l.split(':')[0])
    uid = email.split('@')[0]
    return uid in alias_cache

################################################################################

def get_changes_files(from_dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place)

    Returns a list of filenames
    """
    try:
        # Much of the rest of p-u/p-a depends on being in the right place
        os.chdir(from_dir)
        changes_files = [x for x in os.listdir(from_dir) if x.endswith('.changes')]
    except OSError, e:
        fubar("Failed to read list from directory %s (%s)" % (from_dir, e))

    return changes_files

################################################################################
Cnf = apt_pkg.newConfiguration()
if not os.getenv("DAK_TEST"):
    apt_pkg.ReadConfigFileISC(Cnf, default_config)

if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf, which_conf_file())