2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
41 import email as modemail
43 from dbconn import DBConn, get_architecture, get_component, get_suite
44 from dak_exceptions import *
45 from textutils import fix_maintainer
46 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
47 re_multi_line_field, re_srchasver, re_verwithext, \
48 re_parse_maintainer, re_taint_free, re_gpg_uid, \
49 re_re_mark, re_whitespace_comment, re_issource
51 from srcformats import srcformats
52 from collections import defaultdict
54 ################################################################################
default_config = "/etc/dak/dak.conf"     #: default dak config, defines host properties
default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used

alias_cache = None        #: Cache for email alias checks
key_uid_email_cache = {}  #: Cache for email addresses from gpg key uids

# Hashes accepted for .changes/.dsc entries, as
# (hashname, function, earliest_changes_version) tuples; the version pair
# is the first changes-format version that must carry the hash.
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
                ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
66 ################################################################################
def html_escape(s):
    """ Escape html chars """
    # Replace every character matched by re_html_escaping with its entity
    # from the html_escaping map (both imported from regexes above).
    return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
72 ################################################################################
def open_file(filename, mode='r'):
    """
    Open C{file}, return fileobject.

    @type filename: string
    @param filename: path/filename to open

    @type mode: string
    @param mode: open mode

    @rtype: fileobject
    @return: open fileobject

    @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
    """
    try:
        f = open(filename, mode)
    except IOError:
        raise CantOpenError(filename)
    return f
96 ################################################################################
def our_raw_input(prompt=""):
    """Prompt on stdout, read and return one line; exit cleanly on EOF (^D)."""
    sys.stdout.write(prompt)
    sys.stdout.flush()
    try:
        ret = raw_input()
        return ret
    except EOFError:
        sys.stderr.write("\nUser interrupt (^D).\n")
        raise SystemExit
109 ################################################################################
def extract_component_from_section(section):
    """
    Split a section such as 'contrib/net' into (section, component).

    If the section carries no explicit "component/" prefix, fall back to a
    configured default (Component::<section>) or to "main".
    """
    component = ""
    # An explicit "component/section" prefix wins.
    if section.find('/') != -1:
        component = section.split('/')[0]

    # Expand default component
    if component == "":
        if Cnf.has_key("Component::%s" % section):
            component = section
        else:
            component = "main"

    return (section, component)
126 ################################################################################
128 def parse_deb822(contents, signing_rules=0):
132 # Split the lines in the input, keeping the linebreaks.
133 lines = contents.splitlines(True)
136 raise ParseChangesError, "[Empty changes file]"
138 # Reindex by line number so we can easily verify the format of
144 indexed_lines[index] = line[:-1]
148 num_of_lines = len(indexed_lines.keys())
151 while index < num_of_lines:
153 line = indexed_lines[index]
155 if signing_rules == 1:
157 if index > num_of_lines:
158 raise InvalidDscError, index
159 line = indexed_lines[index]
160 if not line.startswith("-----BEGIN PGP SIGNATURE"):
161 raise InvalidDscError, index
166 if line.startswith("-----BEGIN PGP SIGNATURE"):
168 if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
170 if signing_rules == 1:
171 while index < num_of_lines and line != "":
173 line = indexed_lines[index]
175 # If we're not inside the signed data, don't process anything
176 if signing_rules >= 0 and not inside_signature:
178 slf = re_single_line_field.match(line)
180 field = slf.groups()[0].lower()
181 changes[field] = slf.groups()[1]
185 changes[field] += '\n'
187 mlf = re_multi_line_field.match(line)
190 raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
191 if first == 1 and changes[field] != "":
192 changes[field] += '\n'
194 changes[field] += mlf.groups()[0] + '\n'
198 if signing_rules == 1 and inside_signature:
199 raise InvalidDscError, index
201 changes["filecontents"] = "".join(lines)
203 if changes.has_key("source"):
204 # Strip the source version in brackets from the source field,
205 # put it in the "source-version" field instead.
206 srcver = re_srchasver.search(changes["source"])
208 changes["source"] = srcver.group(1)
209 changes["source-version"] = srcver.group(2)
212 raise ParseChangesError, error
216 ################################################################################
218 def parse_changes(filename, signing_rules=0):
220 Parses a changes file and returns a dictionary where each field is a
221 key. The mandatory first argument is the filename of the .changes
224 signing_rules is an optional argument:
226 - If signing_rules == -1, no signature is required.
227 - If signing_rules == 0 (the default), a signature is required.
228 - If signing_rules == 1, it turns on the same strict format checking
231 The rules for (signing_rules == 1)-mode are:
233 - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
234 followed by any PGP header data and must end with a blank line.
236 - The data section must end with a blank line and must be followed by
237 "-----BEGIN PGP SIGNATURE-----".
240 changes_in = open_file(filename)
241 content = changes_in.read()
244 unicode(content, 'utf-8')
246 raise ChangesUnicodeError, "Changes file not proper utf-8"
247 return parse_deb822(content, signing_rules)
249 ################################################################################
def hash_key(hashname):
    """Return the files-dict key under which a *hashname* checksum is stored."""
    return hashname + 'sum'
254 ################################################################################
def create_hash(where, files, hashname, hashfunc):
    """
    create_hash extends the passed files dict with the given hash by
    iterating over all files on disk and passing them to the hashing
    function given.
    """

    rejmsg = []
    for f in files.keys():
        try:
            file_handle = open_file(f)
        except CantOpenError:
            rejmsg.append("Could not open file %s for checksumming" % (f))
            continue

        files[f][hash_key(hashname)] = hashfunc(file_handle)

        file_handle.close()
    return rejmsg
276 ################################################################################
def check_hash(where, files, hashname, hashfunc):
    """
    check_hash checks the given hash in the files dict against the actual
    files on disk. The hash values need to be present consistently in
    all file entries. It does not modify its input in any way.
    """

    rejmsg = []
    for f in files.keys():
        file_handle = None
        try:
            try:
                file_handle = open_file(f)

                # Check for the hash entry, to not trigger a KeyError.
                if not files[f].has_key(hash_key(hashname)):
                    rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
                        where))
                    continue

                # Actually check the hash for correctness.
                if hashfunc(file_handle) != files[f][hash_key(hashname)]:
                    rejmsg.append("%s: %s check failed in %s" % (f, hashname,
                        where))
            except CantOpenError:
                # TODO: This happens when the file is in the pool.
                # warn("Cannot open file %s" % f)
                continue
        finally:
            # Always release the file handle, success or not.
            if file_handle:
                file_handle.close()
    return rejmsg
311 ################################################################################
def check_size(where, files):
    """
    check_size checks the file sizes in the passed files dict against the
    files on disk.
    """

    rejmsg = []
    for f in files.keys():
        try:
            entry = os.stat(f)
        except OSError as exc:
            if exc.errno == 2:
                # TODO: This happens when the file is in the pool.
                continue
            raise

        actual_size = entry[stat.ST_SIZE]
        size = int(files[f]["size"])
        if size != actual_size:
            rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
                   % (f, actual_size, size, where))
    return rejmsg
336 ################################################################################
def check_dsc_files(dsc_filename, dsc=None, dsc_files=None):
    """
    Verify that the files listed in the Files field of the .dsc are
    those expected given the announced Format.

    @type dsc_filename: string
    @param dsc_filename: path of .dsc file

    @type dsc: dict
    @param dsc: the content of the .dsc parsed by C{parse_changes()}

    @type dsc_files: dict
    @param dsc_files: the file list returned by C{build_file_list()}

    @rtype: list
    @return: all errors detected
    """
    rejmsg = []

    # Parse the file if needed
    if dsc is None:
        dsc = parse_changes(dsc_filename, signing_rules=1)

    if dsc_files is None:
        dsc_files = build_file_list(dsc, is_a_dsc=1)

    # Ensure .dsc lists proper set of source files according to the format
    # announced
    has = defaultdict(lambda: 0)

    for f in dsc_files.keys():
        m = re_issource.match(f)
        if not m:
            rejmsg.append("%s: %s in Files field not recognised as source."
                          % (dsc_filename, f))
            continue

        # NOTE(review): assumes re_issource's third group carries the
        # file-type suffix (e.g. "orig.tar.gz") — confirm against regexes.
        ftype = m.group(3)
        if ftype == "orig.tar.gz":
            has['orig_tar_gz'] += 1
            has['orig_tar'] += 1
        elif ftype == "diff.gz":
            has['debian_diff'] += 1
        elif ftype == "tar.gz":
            has['native_tar_gz'] += 1
            has['native_tar'] += 1
        elif re.match(r"debian\.tar\.(gz|bz2|lzma)", ftype):
            has['debian_tar'] += 1
        elif re.match(r"orig\.tar\.(gz|bz2|lzma)", ftype):
            has['orig_tar'] += 1
        elif re.match(r"tar\.(gz|bz2|lzma)", ftype):
            has['native_tar'] += 1
        elif re.match(r"orig-.+\.tar\.(gz|bz2|lzma)", ftype):
            has['more_orig_tar'] += 1
        else:
            # Bug fix: this used to call the undefined name reject();
            # collect the error like every other check in this function.
            rejmsg.append("%s: unexpected source file '%s'" % (dsc_filename, f))

    # Sanity-check the collected counts.
    if has['orig_tar'] > 1:
        rejmsg.append("%s: lists multiple .orig tarballs." % (dsc_filename))
    if has['native_tar'] > 1:
        rejmsg.append("%s: lists multiple native tarballs." % (dsc_filename))
    if has['debian_tar'] > 1 or has['debian_diff'] > 1:
        rejmsg.append("%s: lists multiple debian diff/tarballs." % (dsc_filename))

    # Delegate per-format file-set validation to the srcformats table.
    for format in srcformats:
        if format.re_format.match(dsc['format']):
            rejmsg.extend([
                '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)
            ])
            break

    return rejmsg
409 ################################################################################
def check_hash_fields(what, manifest):
    """
    check_hash_fields ensures that there are no checksum fields in the
    given dict that we do not know about.
    """

    rejmsg = []
    # Names of the hashes we can handle (see known_hashes above).  A list
    # comprehension instead of map(lambda ...): clearer, and not a one-shot
    # iterator, so repeated membership tests below stay correct.
    hashes = [x[0] for x in known_hashes]
    for field in manifest:
        if field.startswith("checksums-"):
            hashname = field.split("-", 1)[1]
            if hashname not in hashes:
                rejmsg.append("Unsupported checksum field for %s "
                              "in %s" % (hashname, what))
    return rejmsg
427 ################################################################################
def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
    """Check (format >= version) or compute the *hashname* checksums for a .changes."""
    if format >= version:
        # The version should contain the specified hash.
        func = check_hash

        # Import hashes from the changes
        rejmsg = parse_checksums(".changes", files, changes, hashname)
        if len(rejmsg) > 0:
            return rejmsg
    else:
        # We need to calculate the hash because it can't possibly
        # be in the file.
        func = create_hash
    return func(".changes", files, hashname, hashfunc)
444 # We could add the orig which might be in the pool to the files dict to
445 # access the checksums easily.
def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
    """
    ensure_dsc_hashes' task is to ensure that each and every *present* hash
    in the dsc is correct, i.e. identical to the changes file and if necessary
    the pool.  The latter task is delegated to check_hash.
    """

    rejmsg = []
    # A hash that is simply absent from the .dsc is not an error here.
    if not dsc.has_key('Checksums-%s' % (hashname,)):
        return rejmsg
    # Import hashes from the dsc
    parse_checksums(".dsc", dsc_files, dsc, hashname)
    # And check them against the files on disk / in the changes.
    rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
    return rejmsg
463 ################################################################################
def parse_checksums(where, files, manifest, hashname):
    """Copy the checksums-<hashname> entries of *manifest* into *files*."""
    rejmsg = []
    field = 'checksums-%s' % hashname
    if not field in manifest:
        return rejmsg
    for line in manifest[field].split('\n'):
        if not line:
            break
        clist = line.strip().split(' ')
        if len(clist) == 3:
            checksum, size, checkfile = clist
        else:
            rejmsg.append("Cannot parse checksum line [%s]" % (line))
            continue
        if not files.has_key(checkfile):
            # TODO: check for the file's entry in the original files dict, not
            # the one modified by (auto)byhand and other weird stuff
            # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
            #    (file, hashname, where))
            continue
        if not files[checkfile]["size"] == size:
            rejmsg.append("%s: size differs for files and checksums-%s entry "\
                          "in %s" % (checkfile, hashname, where))
            continue
        files[checkfile][hash_key(hashname)] = checksum
    for f in files.keys():
        if not files[f].has_key(hash_key(hashname)):
            # Bug fix: report the file that is actually missing the entry
            # (f), not whichever name the parsing loop happened to end on.
            rejmsg.append("%s: no entry in checksums-%s in %s" % (f,
                hashname, where))
    return rejmsg
496 ################################################################################
498 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
500 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
503 # Make sure we have a Files: field to parse...
504 if not changes.has_key(field):
505 raise NoFilesFieldError
507 # Make sure we recognise the format of the Files: field
508 format = re_verwithext.search(changes.get("format", "0.0"))
510 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
512 format = format.groups()
513 if format[1] == None:
514 format = int(float(format[0])), 0, format[2]
516 format = int(format[0]), int(format[1]), format[2]
517 if format[2] == None:
521 # format = (0,0) are missing format headers of which we still
522 # have some in the archive.
523 if format != (1,0) and format != (0,0) and \
524 format != (3,0,"quilt") and format != (3,0,"native"):
525 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
527 if (format < (1,5) or format > (1,8)):
528 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
529 if field != "files" and format < (1,8):
530 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
532 includes_section = (not is_a_dsc) and field == "files"
534 # Parse each entry/line:
535 for i in changes[field].split('\n'):
539 section = priority = ""
542 (md5, size, section, priority, name) = s
544 (md5, size, name) = s
546 raise ParseChangesError, i
553 (section, component) = extract_component_from_section(section)
555 files[name] = Dict(size=size, section=section,
556 priority=priority, component=component)
557 files[name][hashname] = md5
561 ################################################################################
563 def send_mail (message, filename=""):
564 """sendmail wrapper, takes _either_ a message string or a file as arguments"""
566 # If we've been passed a string dump it into a temporary file
568 (fd, filename) = tempfile.mkstemp()
569 os.write (fd, message)
572 if Cnf.has_key("Dinstall::MailWhiteList") and \
573 Cnf["Dinstall::MailWhiteList"] != "":
574 message_in = open_file(filename)
575 message_raw = modemail.message_from_file(message_in)
579 whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
581 for line in whitelist_in:
582 if not re_whitespace_comment.match(line):
583 if re_re_mark.match(line):
584 whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
586 whitelist.append(re.compile(re.escape(line.strip())))
591 fields = ["To", "Bcc", "Cc"]
594 value = message_raw.get(field, None)
597 for item in value.split(","):
598 (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
604 if not mail_whitelisted:
605 print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])
609 # Doesn't have any mail in whitelist so remove the header
611 del message_raw[field]
613 message_raw.replace_header(field, string.join(match, ", "))
615 # Change message fields in order if we don't have a To header
616 if not message_raw.has_key("To"):
619 if message_raw.has_key(field):
620 message_raw[fields[-1]] = message_raw[field]
621 del message_raw[field]
624 # Clean up any temporary files
625 # and return, as we removed all recipients.
627 os.unlink (filename);
630 fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
631 os.write (fd, message_raw.as_string(True));
635 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
637 raise SendmailFailedError, output
639 # Clean up any temporary files
643 ################################################################################
def poolify (source, component):
    """Return the pool subdirectory for *source* within *component*."""
    if component:
        component += '/'
    # "lib*" packages are bucketed by their first four characters,
    # everything else by the first character.
    if source[:3] == "lib":
        return component + source[:4] + '/' + source + '/'
    else:
        return component + source[:1] + '/' + source + '/'
653 ################################################################################
def move (src, dest, overwrite = 0, perms = 0o664):
    """Move *src* to *dest* (file or directory), creating dest dirs if needed."""
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = dest
    else:
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        umask = os.umask(0o000)
        os.makedirs(dest_dir, 0o2775)
        os.umask(umask)
    #print "Moving %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
        if not overwrite:
            fubar("Can't move %s to %s - file already exists." % (src, dest))
        else:
            if not os.access(dest, os.W_OK):
                fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
    os.unlink(src)
def copy (src, dest, overwrite = 0, perms = 0o664):
    """Copy *src* to *dest* (file or directory), creating dest dirs if needed."""
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = dest
    else:
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        umask = os.umask(0o000)
        os.makedirs(dest_dir, 0o2775)
        os.umask(umask)
    #print "Copying %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
        if not overwrite:
            raise FileExistsError
        else:
            if not os.access(dest, os.W_OK):
                raise CantOverwriteError
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
700 ################################################################################
def where_am_i ():
    """Return the configured database hostname for this host, or the hostname."""
    res = socket.gethostbyaddr(socket.gethostname())
    database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
    if database_hostname:
        return database_hostname
    else:
        return res[0]
def which_conf_file ():
    # Pick the dak config file: a per-user one if allowed, then a per-host
    # Config::<host>::DakConfig override, else the compiled-in default.
    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # NOTE(review): os.path.join() discards *homedir* here because the
        # second component is absolute, so confpath is always
        # "/etc/dak.conf".  Presumably homedir + "/etc/dak.conf" was
        # intended — confirm before relying on this.
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            apt_pkg.ReadConfigFileISC(Cnf,default_config)

    # We are still in here, so there is no local config file or we do
    # not allow local files. Do the normal stuff.
    if Cnf.get("Config::" + res[0] + "::DakConfig"):
        return Cnf["Config::" + res[0] + "::DakConfig"]

    return default_config
def which_apt_conf_file ():
    # Pick the apt config file: a per-user one if allowed, then a per-host
    # Config::<host>::AptConfig override, else the compiled-in default.
    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # NOTE(review): as in which_conf_file(), os.path.join() discards
        # *homedir* because the second component is absolute — confirm
        # whether homedir + "/etc/dak.conf" was intended.
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            apt_pkg.ReadConfigFileISC(Cnf,default_config)

    if Cnf.get("Config::" + res[0] + "::AptConfig"):
        return Cnf["Config::" + res[0] + "::AptConfig"]

    return default_apt_config
def which_alias_file():
    """Return the per-host forward-alias file if it exists, else None."""
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
    if os.path.exists(aliasfn):
        return aliasfn
    else:
        return None
748 ################################################################################
def TemplateSubst(map, filename):
    """ Perform a substition of template """
    templatefile = open_file(filename)
    template = templatefile.read()
    # Straight textual replacement of every key by its (stringified) value.
    for x in map.keys():
        template = template.replace(x, str(map[x]))
    templatefile.close()
    return template
759 ################################################################################
def fubar(msg, exit_code=1):
    """Print an error message to stderr and exit with *exit_code*."""
    sys.stderr.write("E: %s\n" % (msg))
    sys.exit(exit_code)

def warn(msg):
    """Print a warning message to stderr."""
    sys.stderr.write("W: %s\n" % (msg))
768 ################################################################################
# Returns the user name with a laughable attempt at rfc822 conformancy
# (read: removing stray periods).
def whoami ():
    """Return the current user's real (GECOS) name with periods stripped."""
    return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')

def getusername ():
    """Return the current user's login name."""
    return pwd.getpwuid(os.getuid())[0]
778 ################################################################################
def size_type (c):
    """Pretty-print byte count *c* with a B/KB/MB unit suffix."""
    t = " B"
    if c > 10240:
        c = c / 1024
        t = " KB"
    if c > 10240:
        c = c / 1024
        t = " MB"
    return ("%d%s" % (c, t))
790 ################################################################################
def cc_fix_changes (changes):
    """Replace the space-separated architecture field with a membership dict."""
    o = changes.get("architecture", "")
    if o != "":
        del changes["architecture"]
    changes["architecture"] = {}
    for j in o.split():
        changes["architecture"][j] = 1
def changes_compare (a, b):
    """ Sort by source name, source version, 'have source', and then by filename """
    # Unparseable files sort deterministically to either end.
    try:
        a_changes = parse_changes(a)
    except:
        return -1

    try:
        b_changes = parse_changes(b)
    except:
        return 1

    # Mash the architecture field into a membership dict.
    cc_fix_changes (a_changes)
    cc_fix_changes (b_changes)

    # Sort by source name
    a_source = a_changes.get("source")
    b_source = b_changes.get("source")
    q = cmp (a_source, b_source)
    if q:
        return q

    # Sort by source version
    a_version = a_changes.get("version", "0")
    b_version = b_changes.get("version", "0")
    q = apt_pkg.VersionCompare(a_version, b_version)
    if q:
        return q

    # Sort by 'have source'
    a_has_source = a_changes["architecture"].get("source")
    b_has_source = b_changes["architecture"].get("source")
    if a_has_source and not b_has_source:
        return -1
    elif b_has_source and not a_has_source:
        return 1

    # Fall back to sort by filename
    return cmp(a, b)
840 ################################################################################
def find_next_free (dest, too_many=100):
    """Return *dest*, or the first free 'dest.N' name; raise after *too_many* tries."""
    extra = 0
    orig_dest = dest
    while os.path.exists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
        extra += 1
    if extra >= too_many:
        raise NoFreeFilenameError
    return dest
852 ################################################################################
def result_join (original, sep = '\t'):
    """Join *original* into a string with *sep*, rendering None entries as ""."""
    resultlist = []
    # Iterate directly instead of indexing with xrange, and compare to
    # None with 'is' rather than '=='.
    for item in original:
        if item is None:
            resultlist.append("")
        else:
            resultlist.append(item)
    return sep.join(resultlist)
863 ################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    """Prefix every (stripped) line of *str*; blank lines only if requested."""
    out = ""
    for line in str.split('\n'):
        line = line.strip()
        if line or include_blank_lines:
            out += "%s%s\n" % (prefix, line)
    # Strip trailing new line
    if out:
        out = out[:-1]
    return out
876 ################################################################################
def validate_changes_file_arg(filename, require_changes=1):
    """
    'filename' is either a .changes or .dak file.  If 'filename' is a
    .dak file, it's changed to be the corresponding .changes file.  The
    function then checks if the .changes file a) exists and b) is
    readable and returns the .changes filename if so.  If there's a
    problem, the next action depends on the option 'require_changes'
    argument:

      - If 'require_changes' == -1, errors are ignored and the .changes
        filename is returned.
      - If 'require_changes' == 0, a warning is given and 'None' is returned.
      - If 'require_changes' == 1, a fatal error is raised.

    """
    error = None

    orig_filename = filename
    if filename.endswith(".dak"):
        filename = filename[:-4]+".changes"

    if not filename.endswith(".changes"):
        error = "invalid file type; not a changes file"
    else:
        if not os.access(filename,os.R_OK):
            if os.path.exists(filename):
                error = "permission denied"
            else:
                error = "file not found"

    if error:
        if require_changes == 1:
            fubar("%s: %s." % (orig_filename, error))
        elif require_changes == 0:
            warn("Skipping %s - %s" % (orig_filename, error))
            return None
        else: # We only care about the .dak file
            return filename
    else:
        return filename
919 ################################################################################
def real_arch(arch):
    """Return True for a concrete architecture (i.e. not 'source' or 'all')."""
    return (arch != "source" and arch != "all")
924 ################################################################################
def join_with_commas_and(list):
    """Render *list* as English prose: 'nothing', 'a', 'a and b', 'a, b and c'."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    head = ", ".join(list[:-1])
    return head + " and " + list[-1]
931 ################################################################################
def pp_deps (deps):
    """Pretty-print a list of (package, version, constraint) dependency atoms."""
    pp_deps = []
    for atom in deps:
        (pkg, version, constraint) = atom
        if constraint:
            pp_dep = "%s (%s %s)" % (pkg, constraint, version)
        else:
            pp_dep = pkg
        pp_deps.append(pp_dep)
    return " |".join(pp_deps)
944 ################################################################################
949 ################################################################################
def parse_args(Options):
    """ Handle -a, -c and -s arguments; returns them as SQL constraints """
    # XXX: This should go away and everything which calls it be converted
    #      to use SQLA properly.  For now, we'll just fix it not to use
    #      the old Pg interface though
    session = DBConn().session()

    # Process suite: map each name to its DB id and build an SQL IN clause.
    if Options["Suite"]:
        suite_ids_list = []
        for suitename in split_args(Options["Suite"]):
            suite = get_suite(suitename, session=session)
            if suite.suite_id is None:
                warn("suite '%s' not recognised." % (suite.suite_name))
            else:
                suite_ids_list.append(suite.suite_id)
        if suite_ids_list:
            con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
        else:
            fubar("No valid suite given.")
    else:
        con_suites = ""

    # Process component: same pattern as suites.
    if Options["Component"]:
        component_ids_list = []
        for componentname in split_args(Options["Component"]):
            component = get_component(componentname, session=session)
            if component is None:
                warn("component '%s' not recognised." % (componentname))
            else:
                component_ids_list.append(component.component_id)
        if component_ids_list:
            con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
        else:
            fubar("No valid component given.")
    else:
        con_components = ""

    # Process architecture: "source" is signalled via check_source rather
    # than an architecture id.
    con_architectures = ""
    check_source = 0
    if Options["Architecture"]:
        arch_ids_list = []
        for archname in split_args(Options["Architecture"]):
            if archname == "source":
                check_source = 1
            else:
                arch = get_architecture(archname, session=session)
                if arch is None:
                    warn("architecture '%s' not recognised." % (archname))
                else:
                    arch_ids_list.append(arch.arch_id)
        if arch_ids_list:
            con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
        else:
            if not check_source:
                fubar("No valid architecture given.")
    else:
        check_source = 1

    return (con_suites, con_architectures, con_components, check_source)
1013 ################################################################################
1015 # Inspired(tm) by Bryn Keller's print_exc_plus (See
1016 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
1019 tb = sys.exc_info()[2]
1026 frame = frame.f_back
1028 traceback.print_exc()
1030 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
1031 frame.f_code.co_filename,
1033 for key, value in frame.f_locals.items():
1034 print "\t%20s = " % key,
1038 print "<unable to print>"
1040 ################################################################################
def try_with_debug(function):
    """Run *function*; on any non-exit exception dump an extended traceback."""
    try:
        function()
    except SystemExit:
        raise
    except:
        print_exc()
1050 ################################################################################
def arch_compare_sw (a, b):
    """
    Function for use in sorting lists of architectures.

    Sorts normally except that 'source' dominates all others.
    """

    if a == "source" and b == "source":
        return 0
    elif a == "source":
        return -1
    elif b == "source":
        return 1

    return cmp (a, b)
1068 ################################################################################
def split_args (s, dwim=1):
    """
    Split command line arguments which can be separated by either commas
    or whitespace.  If dwim is set, it will complain about string ending
    in comma since this usually means someone did 'dak ls -a i386, m68k
    foo' or something and the inevitable confusion resulting from 'm68k'
    being treated as an argument is undesirable.
    """

    if s.find(",") == -1:
        return s.split()
    else:
        if s[-1:] == "," and dwim:
            fubar("split_args: found trailing comma, spurious space maybe?")
        return s.split(",")
1086 ################################################################################
def Dict(**kwargs):
    """Return the given keyword arguments as a plain dictionary."""
    return kwargs
1090 ########################################
def gpgv_get_status_output(cmd, status_read, status_write):
    """
    Our very own version of commands.getouputstatus(), hacked to support
    gpgv's status fd.
    """

    # Run the command through the shell so callers can pass a full string.
    cmd = ['/bin/sh', '-c', cmd]
    p2cread, p2cwrite = os.pipe()
    c2pread, c2pwrite = os.pipe()
    errout, errin = os.pipe()
    pid = os.fork()
    if pid == 0:
        # Child: wire stdin/stdout/stderr to our pipes...
        os.close(0)
        os.close(1)
        os.dup(p2cread)
        os.dup(c2pwrite)
        os.close(2)
        os.dup(errin)
        # ...and close every other fd except gpgv's status fd.
        for i in range(3, 256):
            if i != status_write:
                try:
                    os.close(i)
                except:
                    pass
        try:
            os.execvp(cmd[0], cmd)
        finally:
            os._exit(1)

    # Parent
    os.close(p2cread)
    os.dup2(c2pread, c2pwrite)
    os.dup2(errout, errin)

    output = status = ""
    while 1:
        i, o, e = select.select([c2pwrite, errin, status_read], [], [])
        more_data = []
        for fd in i:
            r = os.read(fd, 8196)
            if len(r) > 0:
                more_data.append(fd)
                if fd == c2pwrite or fd == errin:
                    output += r
                elif fd == status_read:
                    status += r
                else:
                    fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
        if not more_data:
            # All pipes hit EOF: reap the child and tidy up our fds.
            pid, exit_status = os.waitpid(pid, 0)
            try:
                os.close(status_write)
                os.close(status_read)
                os.close(c2pread)
                os.close(c2pwrite)
                os.close(p2cwrite)
                os.close(errin)
                os.close(errout)
            except:
                pass
            break

    return output, status, exit_status
1157 ################################################################################
def process_gpgv_output(status):
    """Parse gpgv --status-fd output into ({keyword: args}, error-string)."""
    # Process the status-fd output
    keywords = {}
    internal_error = ""
    for line in status.split('\n'):
        line = line.replace('\n', '')
        if line == "":
            continue
        split = line.split()
        if len(split) < 2:
            internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
            continue
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
            continue
        args = split[2:]
        # 'keyword in keywords' instead of has_key(): same behaviour,
        # modern idiom.  A few tokens may legitimately repeat.
        if keyword in keywords and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
            continue
        else:
            keywords[keyword] = args

    return (keywords, internal_error)
1184 ################################################################################
1186 def retrieve_key (filename, keyserver=None, keyring=None):
1188 Retrieve the key that signed 'filename' from 'keyserver' and
1189 add it to 'keyring'. Returns nothing on success, or an error message
1193 # Defaults for keyserver and keyring
1195 keyserver = Cnf["Dinstall::KeyServer"]
1197 keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
1199 # Ensure the filename contains no shell meta-characters or other badness
1200 if not re_taint_free.match(filename):
1201 return "%s: tainted filename" % (filename)
1203 # Invoke gpgv on the file
1204 status_read, status_write = os.pipe()
1205 cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1206 (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1208 # Process the status-fd output
1209 (keywords, internal_error) = process_gpgv_output(status)
1211 return internal_error
1213 if not keywords.has_key("NO_PUBKEY"):
1214 return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1216 fingerprint = keywords["NO_PUBKEY"][0]
1217 # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
1218 # it'll try to create a lockfile in /dev. A better solution might
1219 # be a tempfile or something.
1220 cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1221 % (Cnf["Dinstall::SigningKeyring"])
1222 cmd += " --keyring %s --keyserver %s --recv-key %s" \
1223 % (keyring, keyserver, fingerprint)
1224 (result, output) = commands.getstatusoutput(cmd)
1226 return "'%s' failed with exit code %s" % (cmd, result)
1230 ################################################################################
def gpg_keyring_args(keyrings=None):
    """Render *keyrings* (default: configured keyrings) as gpg --keyring args."""
    if not keyrings:
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    return " ".join(["--keyring %s" % x for x in keyrings])
1238 ################################################################################
def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None):
    """
    Check the signature of a file and return the fingerprint if the
    signature is valid or 'None' if it's not.

    @type sig_filename: string
    @param sig_filename: the name of the (possibly clearsigned) file whose
        signature should be checked.

    @type data_filename: string
    @param data_filename: optional name of the file a detached signature
        applies to.

    @type keyrings: list of strings (or None)
    @param keyrings: keyrings to use; defaults to the configured
        Dinstall::GPGKeyring list.

    @type autofetch: boolean (or None)
    @param autofetch: whether to auto-retrieve the signing key; None means
        use the Dinstall::KeyAutoFetch configuration default.

    @rtype: tuple
    @return: (fingerprint, []) if the signature is valid, otherwise
        (None, [list of reject messages]).
    """

    rejects = []

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(sig_filename):
        rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
        return (None, rejects)

    if data_filename and not re_taint_free.match(data_filename):
        rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
        return (None, rejects)

    if not keyrings:
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    # Autofetch the signing key if that's enabled
    if autofetch is None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    if autofetch:
        error_msg = retrieve_key(sig_filename)
        if error_msg:
            rejects.append(error_msg)
            return (None, rejects)

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    if internal_error:
        rejects.append("internal error while performing signature check on %s." % (sig_filename))
        # NOTE: these used to be two-argument rejects.append(msg, "") calls,
        # which raise TypeError (list.append takes exactly one argument).
        rejects.append(internal_error)
        rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.")
        return (None, rejects)

    # Now check for obviously bad things in the processed output
    if "KEYREVOKED" in keywords:
        rejects.append("The key used to sign %s has been revoked." % (sig_filename))
    if "BADSIG" in keywords:
        rejects.append("bad signature on %s." % (sig_filename))
    if "ERRSIG" in keywords and "NO_PUBKEY" not in keywords:
        rejects.append("failed to check signature on %s." % (sig_filename))
    if "NO_PUBKEY" in keywords:
        args = keywords["NO_PUBKEY"]
        # Guard against a malformed status line with no key id.
        key = args[0] if len(args) >= 1 else "unknown"
        rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if "BADARMOR" in keywords:
        rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if "NODATA" in keywords:
        rejects.append("no signature found in %s." % (sig_filename))
    if "EXPKEYSIG" in keywords:
        args = keywords["EXPKEYSIG"]
        key = args[0] if len(args) >= 1 else "unknown"
        rejects.append("Signature made by expired key 0x%s" % (key))
    if "KEYEXPIRED" in keywords and "GOODSIG" not in keywords:
        args = keywords["KEYEXPIRED"]
        expiredate = ""
        if len(args) >= 1:
            timestamp = args[0]
            if timestamp.count("T") == 0:
                # Numeric (epoch) timestamp; ISO timestamps contain a 'T'.
                try:
                    expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
                except ValueError:
                    expiredate = "unknown (%s)" % (timestamp)
            else:
                expiredate = timestamp
        rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))

    if len(rejects) > 0:
        return (None, rejects)

    # Next check gpgv exited with a zero return code
    if exit_status:
        rejects.append("gpgv failed while checking %s." % (sig_filename))
        if status.strip():
            rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "))
        else:
            rejects.append(prefix_multi_line_string(output, " [GPG output:] "))
        return (None, rejects)

    # Sanity check the good stuff we expect
    if "VALIDSIG" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
    else:
        args = keywords["VALIDSIG"]
        if len(args) < 1:
            rejects.append("internal error while checking signature on %s." % (sig_filename))
        else:
            fingerprint = args[0]
    if "GOODSIG" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if "SIG_ID" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

    # Finally ensure there's not something we don't recognise
    known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
                          NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")

    for keyword in keywords.keys():
        if keyword not in known_keywords:
            rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))

    if len(rejects) > 0:
        return (None, rejects)

    return (fingerprint, [])
1372 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """
    Retrieve email addresses from gpg key uids for a given fingerprint.

    Results are memoised in the module-level key_uid_email_cache, so gpg is
    only invoked once per fingerprint.

    @rtype: set of strings
    @return: the email addresses found in the key's uids (possibly empty).
    """
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses is not None:
        # Cache hit - skip the expensive gpg invocation.
        return addresses
    addresses = set()
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
           % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    if result == 0:
        # Only trust the output if gpg exited successfully.
        for l in output.split('\n'):
            m = re_gpg_uid.match(l)
            if m:
                addresses.add(m.group(1))
    key_uid_email_cache[fingerprint] = addresses
    return addresses
1391 ################################################################################
1393 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """
    Word-wrap 'paragraph' to at most 'max_length' characters per line,
    prefixing every continuation line with 'prefix'.  Words longer than
    'max_length' are emitted on a line of their own rather than split.

    @rtype: string
    @return: the wrapped text
    """
    line = ""
    s = ""
    have_started = 0
    words = paragraph.split()

    for word in words:
        word_size = len(word)
        if word_size > max_length:
            # Oversized word: flush any pending line, then put the word on
            # its own line (it cannot fit within max_length anyway).
            if have_started:
                s += line + '\n' + prefix
            s += word + '\n' + prefix
            line = ""
            have_started = 0
        else:
            if have_started:
                new_length = len(line) + word_size + 1
                if new_length > max_length:
                    # Word doesn't fit on the current line: flush it and
                    # start a fresh line with this word.
                    s += line + '\n' + prefix
                    line = word
                else:
                    line += ' ' + word
            else:
                line = word
                have_started = 1

    # Flush whatever is left on the last line (no trailing newline).
    if have_started:
        s += line

    return s
1424 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
    Returns fixed 'src'.
    """
    # Strip the root from both endpoints (first occurrence only).
    stripped_src = src.replace(root, '', 1)
    link_parent = os.path.dirname(dest.replace(root, '', 1))
    # One "../" hop per path component of the link's parent directory.
    hops = len(link_parent.split('/'))
    return '../' * hops + stripped_src
1437 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.

    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pair (fd, name).
    """
    # tempfile does the secure creation; spell the arguments out by keyword.
    return tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=directory)
1451 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Return a secure and unique directory by pre-creating it.

    If 'parent' is non-null, it will be the directory the directory is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pathname to the new directory.
    """
    # tempfile does the secure creation; spell the arguments out by keyword.
    return tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=parent)
1465 ################################################################################
def is_email_alias(email):
    """
    Check whether the user part of 'email' is listed in the alias file.

    The alias file is read once and memoised in the module-level
    alias_cache (declared at the top of this module as None).

    @rtype: boolean
    @return: True if the local part of the address is a known alias.
    """
    global alias_cache  # module-level cache; must be declared global to assign it
    if alias_cache is None:
        aliasfn = which_alias_file()
        alias_cache = set()
        if aliasfn:
            # Alias lines look like "name: target"; the key is the part
            # before the first colon.
            for l in open(aliasfn):
                alias_cache.add(l.split(':')[0])
    uid = email.split('@')[0]
    return uid in alias_cache
1479 ################################################################################
def get_changes_files(dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place)

    Returns a list of filenames
    """
    try:
        # Much of the rest of p-u/p-a depends on being in the right place
        os.chdir(dir)
        changes_files = [x for x in os.listdir(dir) if x.endswith('.changes')]
    except OSError as e:
        # fubar logs the error and exits; the error message below references
        # both the directory and the underlying OSError.
        fubar("Failed to read list from directory %s (%s)" % (dir, e))

    return changes_files
1498 ################################################################################
# Module initialisation: load the dak configuration into the global 'Cnf'.
Cnf = apt_pkg.newConfiguration()
apt_pkg.ReadConfigFileISC(Cnf, default_config)

# Layer the host-specific configuration on top of the defaults, if a
# non-default config file is in use.
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf, which_conf_file())
1508 ###############################################################################