2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
41 import email as modemail
43 from dbconn import DBConn, get_architecture, get_component, get_suite
44 from dak_exceptions import *
45 from textutils import fix_maintainer
46 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
47 re_multi_line_field, re_srchasver, re_verwithext, \
48 re_parse_maintainer, re_taint_free, re_gpg_uid, \
49 re_re_mark, re_whitespace_comment, re_issource
51 ################################################################################
default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used

alias_cache = None #: Cache for email alias checks
key_uid_email_cache = {} #: Cache for email addresses from gpg key uids

# Each entry is (hashname, hashing function, earliest .changes format
# version expected to carry the corresponding Checksums-<hashname> field).
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
                ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
63 ################################################################################
66 """ Escape html chars """
67 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
69 ################################################################################
def open_file(filename, mode='r'):
    """
    Open C{filename}, return fileobject.

    @type filename: string
    @param filename: path/filename to open

    @type mode: string
    @param mode: open mode

    @rtype: fileobject
    @return: open fileobject

    @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
    """
    try:
        f = open(filename, mode)
    except IOError:
        # Re-raise as a dak-specific exception so callers only need to
        # catch one type.  Call form works on both Python 2 and 3
        # (the old "raise CantOpenError, filename" is Python-2-only).
        raise CantOpenError(filename)
    return f
93 ################################################################################
def our_raw_input(prompt=""):
    """Display C{prompt} and read one line of input from the user."""
    # Prompt goes to stdout so it interleaves correctly with other output.
    sys.stdout.write(prompt)
    # NOTE(review): the lines between the prompt and the message below are
    # not visible in this view; presumably the input line is read and EOF
    # is handled by printing this message and aborting — confirm against
    # the full source.
    sys.stderr.write("\nUser interrupt (^D).\n")
106 ################################################################################
def extract_component_from_section(section):
    """Split a Section value such as "contrib/net" into (section, component)."""
    # A '/' means the component is given explicitly as the first element.
    if section.find('/') != -1:
        component = section.split('/')[0]

    # Expand default component
    # NOTE(review): the body of this lookup and the fallback branch are
    # elided in this view; presumably plain sections are mapped to the
    # configured or default component — confirm against the full source.
    if Cnf.has_key("Component::%s" % section):

    return (section, component)
123 ################################################################################
def parse_deb822(contents, signing_rules=0):
    """Parse deb822-formatted C{contents} into a field -> value dict.

    NOTE(review): this view of the function is incomplete (many lines are
    elided); the comments below describe only the visible code.
    """
    # Split the lines in the input, keeping the linebreaks.
    lines = contents.splitlines(True)
    raise ParseChangesError, "[Empty changes file]"

    # Reindex by line number so we can easily verify the format of
    indexed_lines[index] = line[:-1]

    num_of_lines = len(indexed_lines.keys())
    while index < num_of_lines:
        line = indexed_lines[index]
        # signing_rules == 1 enforces the strict dpkg-source layout: the
        # signed data must be terminated by a PGP SIGNATURE block.
        if signing_rules == 1:
            if index > num_of_lines:
                raise InvalidDscError, index
            line = indexed_lines[index]
            if not line.startswith("-----BEGIN PGP SIGNATURE"):
                raise InvalidDscError, index
        if line.startswith("-----BEGIN PGP SIGNATURE"):
        if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
            if signing_rules == 1:
                # Skip the PGP armor header block up to the blank line.
                while index < num_of_lines and line != "":
                    line = indexed_lines[index]
        # If we're not inside the signed data, don't process anything
        if signing_rules >= 0 and not inside_signature:
        # "Field: value" on a single line.
        slf = re_single_line_field.match(line)
        field = slf.groups()[0].lower()
        changes[field] = slf.groups()[1]
        changes[field] += '\n'
        # Continuation line of a multi-line field.
        mlf = re_multi_line_field.match(line)
        raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
        if first == 1 and changes[field] != "":
            changes[field] += '\n'
        changes[field] += mlf.groups()[0] + '\n'

    if signing_rules == 1 and inside_signature:
        raise InvalidDscError, index

    changes["filecontents"] = "".join(lines)

    if changes.has_key("source"):
        # Strip the source version in brackets from the source field,
        # put it in the "source-version" field instead.
        srcver = re_srchasver.search(changes["source"])
        changes["source"] = srcver.group(1)
        changes["source-version"] = srcver.group(2)

    raise ParseChangesError, error
213 ################################################################################
def parse_changes(filename, signing_rules=0):
    """
    Parses a changes file and returns a dictionary where each field is a
    key. The mandatory first argument is the filename of the .changes
    file.

    signing_rules is an optional argument:

      - If signing_rules == -1, no signature is required.
      - If signing_rules == 0 (the default), a signature is required.
      - If signing_rules == 1, it turns on the same strict format checking
        as dpkg-source.

    The rules for (signing_rules == 1)-mode are:

      - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
        followed by any PGP header data and must end with a blank line.

      - The data section must end with a blank line and must be followed by
        "-----BEGIN PGP SIGNATURE-----".
    """
    changes_in = open_file(filename)
    content = changes_in.read()
    # Validate that the file is proper UTF-8 before parsing.
    # NOTE(review): the try/except wrapping this check is elided in this
    # view; the error below is presumably raised from its except branch.
    unicode(content, 'utf-8')
    raise ChangesUnicodeError, "Changes file not proper utf-8"
    return parse_deb822(content, signing_rules)
246 ################################################################################
def hash_key(hashname):
    """Return the files-dict key under which the given hash is stored,
    e.g. "sha1" -> "sha1sum"."""
    return hashname + 'sum'
251 ################################################################################
def create_hash(where, files, hashname, hashfunc):
    """
    create_hash extends the passed files dict with the given hash by
    iterating over all files on disk and passing them to the hashing
    function given.

    NOTE(review): the try/continue/return plumbing is elided in this view;
    presumably the accumulated rejection messages are returned — confirm
    against the full source.
    """
    for f in files.keys():
        file_handle = open_file(f)
        # Unreadable files become rejection messages rather than aborting
        # the whole run.
        except CantOpenError:
            rejmsg.append("Could not open file %s for checksumming" % (f))
        files[f][hash_key(hashname)] = hashfunc(file_handle)
273 ################################################################################
def check_hash(where, files, hashname, hashfunc):
    """
    check_hash checks the given hash in the files dict against the actual
    files on disk. The hash values need to be present consistently in
    all file entries. It does not modify its input in any way.

    NOTE(review): the try/finally structure, message tails and return are
    elided in this view; comments cover only the visible code.
    """
    for f in files.keys():
        file_handle = open_file(f)
        # Check for the hash entry, to not trigger a KeyError.
        if not files[f].has_key(hash_key(hashname)):
            rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
        # Actually check the hash for correctness.
        if hashfunc(file_handle) != files[f][hash_key(hashname)]:
            rejmsg.append("%s: %s check failed in %s" % (f, hashname,
        except CantOpenError:
        # TODO: This happens when the file is in the pool.
        # warn("Cannot open file %s" % f)
308 ################################################################################
def check_size(where, files):
    """
    check_size checks the file sizes in the passed files dict against the
    sizes of the actual files on disk.

    NOTE(review): the os.stat call and its error handling are elided in
    this view; 'entry' is presumably the stat result — confirm against
    the full source.
    """
    for f in files.keys():
        # TODO: This happens when the file is in the pool.
        actual_size = entry[stat.ST_SIZE]
        size = int(files[f]["size"])
        if size != actual_size:
            rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
                          % (f, actual_size, size, where))
333 ################################################################################
def check_dsc_files(dsc_filename, dsc=None, dsc_files=None):
    """
    Verify that the files listed in the Files field of the .dsc are
    those expected given the announced Format.

    @type dsc_filename: string
    @param dsc_filename: path of .dsc file

    @type dsc: dict
    @param dsc: the content of the .dsc parsed by C{parse_changes()}

    @type dsc_files: dict
    @param dsc_files: the file list returned by C{build_file_list()}

    @rtype: list
    @return: all errors detected
    """
    rejmsg = []

    # Parse the file if needed
    if dsc is None:
        dsc = parse_changes(dsc_filename, signing_rules=1)
    if dsc_files is None:
        dsc_files = build_file_list(dsc, is_a_dsc=1)

    # Ensure .dsc lists a proper set of source files according to the
    # announced format.  The *_gz counters track the gz-only names that
    # format 1.0 requires; the wider counters accept bz2/lzma as well.
    has_orig_tar_gz = 0
    has_native_tar_gz = 0
    has_debian_diff = 0
    has_orig_tar = 0
    has_native_tar = 0
    has_debian_tar = 0
    has_more_orig_tar = 0

    for f in dsc_files.keys():
        m = re_issource.match(f)
        if not m:
            rejmsg.append("%s: %s in Files field not recognised as source."
                          % (dsc_filename, f))
            continue
        # Type suffix captured by re_issource (e.g. "orig.tar.gz").
        # NOTE(review): group index assumed from the regex — confirm.
        ftype = m.group(3)
        if ftype == "orig.tar.gz":
            has_orig_tar_gz += 1
            has_orig_tar += 1
        elif ftype == "diff.gz":
            has_debian_diff += 1
        elif ftype == "tar.gz":
            has_native_tar_gz += 1
            has_native_tar += 1
        elif re.match(r"debian\.tar\.(gz|bz2|lzma)", ftype):
            has_debian_tar += 1
        elif re.match(r"orig\.tar\.(gz|bz2|lzma)", ftype):
            has_orig_tar += 1
        elif re.match(r"tar\.(gz|bz2|lzma)", ftype):
            has_native_tar += 1
        elif re.match(r"orig-.+\.tar\.(gz|bz2|lzma)", ftype):
            has_more_orig_tar += 1
        else:
            # BUGFIX: this branch used to call the undefined name
            # reject(); collect the message like every other check.
            rejmsg.append("%s: unexpected source file '%s'" % (dsc_filename, f))

    # Format-independent checks
    if has_orig_tar > 1:
        rejmsg.append("%s: lists multiple .orig tarballs." % (dsc_filename))
    if has_native_tar > 1:
        rejmsg.append("%s: lists multiple native tarballs." % (dsc_filename))
    if has_debian_tar > 1 or has_debian_diff > 1:
        rejmsg.append("%s: lists multiple debian diff/tarballs." % (dsc_filename))

    # Format-dependent checks
    if dsc["format"] == "1.0":
        if not (has_native_tar_gz or (has_orig_tar_gz and has_debian_diff)):
            rejmsg.append("%s: no .tar.gz or .orig.tar.gz+.diff.gz in "
                          "'Files' field." % (dsc_filename))
        if (has_orig_tar_gz != has_orig_tar) or \
           (has_native_tar_gz != has_native_tar) or \
           has_debian_tar or has_more_orig_tar:
            rejmsg.append("%s: contains source files not allowed in format 1.0"
                          % (dsc_filename))
    elif re.match(r"3\.\d+ \(native\)", dsc["format"]):
        if not has_native_tar:
            rejmsg.append("%s: lack required files for format 3.x (native)."
                          % (dsc_filename))
        if has_orig_tar or has_debian_diff or has_debian_tar or \
           has_more_orig_tar:
            rejmsg.append("%s: contains source files not allowed in "
                          "format '3.x (native)'" % (dsc_filename))
    elif re.match(r"3\.\d+ \(quilt\)", dsc["format"]):
        if not (has_orig_tar and has_debian_tar):
            rejmsg.append("%s: lack required files for format "
                          "'3.x (quilt)'." % (dsc_filename))
        if has_debian_diff or has_native_tar:
            rejmsg.append("%s: contains source files not allowed in format "
                          "3.x (quilt)" % (dsc_filename))

    return rejmsg
427 ################################################################################
def check_hash_fields(what, manifest):
    """
    check_hash_fields ensures that there are no checksum fields in the
    given dict that we do not know about.

    @param what: name of the manifest, used in rejection messages
    @param manifest: parsed .changes/.dsc-style dict to inspect
    @return: list of rejection messages (empty if all fields are known)
    """
    rejmsg = []
    # List comprehension instead of map/lambda: yields a real list on
    # both Python 2 and 3 (a py3 map object would be exhausted after the
    # first membership test below).
    hashes = [x[0] for x in known_hashes]
    for field in manifest:
        if field.startswith("checksums-"):
            hashname = field.split("-", 1)[1]
            if hashname not in hashes:
                rejmsg.append("Unsupported checksum field for %s "
                              "in %s" % (hashname, what))
    return rejmsg
445 ################################################################################
def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
    # Validate (or import) one checksum kind for a .changes file.
    # NOTE(review): several lines of this function are elided in this
    # view; comments cover only the visible code.
    if format >= version:
        # The version should contain the specified hash.
        # Import hashes from the changes
        rejmsg = parse_checksums(".changes", files, changes, hashname)
        # We need to calculate the hash because it can't possibly
        # be present in older-format files.
        return func(".changes", files, hashname, hashfunc)
462 # We could add the orig which might be in the pool to the files dict to
463 # access the checksums easily.
def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
    """
    ensure_dsc_hashes' task is to ensure that each and every *present* hash
    in the dsc is correct, i.e. identical to the changes file and if necessary
    the pool. The latter task is delegated to check_hash.

    NOTE(review): the rejmsg initialisation, early-return and final return
    are elided in this view.
    """
    # Nothing to do when the .dsc does not carry this checksum field.
    if not dsc.has_key('Checksums-%s' % (hashname,)):
    # Import hashes from the dsc
    parse_checksums(".dsc", dsc_files, dsc, hashname)
    rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
481 ################################################################################
def parse_checksums(where, files, manifest, hashname):
    """
    Import the checksums-<hashname> field of C{manifest} into the files
    dict and cross-check sizes.

    @param where: manifest name (".changes"/".dsc") used in messages
    @param files: dict of filename -> entry dict; updated in place
    @param manifest: parsed control data containing the checksum field
    @param hashname: hash to import, e.g. "sha1"
    @return: list of rejection messages
    """
    rejmsg = []
    field = 'checksums-%s' % hashname
    if not field in manifest:
        return rejmsg
    for line in manifest[field].split('\n'):
        if not line:
            break
        clist = line.strip().split(' ')
        if len(clist) == 3:
            checksum, size, checkfile = clist
        else:
            rejmsg.append("Cannot parse checksum line [%s]" % (line))
            continue
        if not checkfile in files:
            # TODO: check for the file's entry in the original files dict, not
            # the one modified by (auto)byhand and other weird stuff
            # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
            #    (file, hashname, where))
            continue
        # Sizes are compared as strings; both come from parsed text.
        if not files[checkfile]["size"] == size:
            rejmsg.append("%s: size differs for files and checksums-%s entry "
                          "in %s" % (checkfile, hashname, where))
        files[checkfile][hash_key(hashname)] = checksum
    for f in files.keys():
        if not hash_key(hashname) in files[f]:
            # BUGFIX: report the file that is actually missing the entry
            # (f), not the last one unpacked in the loop above (checkfile).
            rejmsg.append("%s: no entry in checksums-%s in %s" % (f,
                hashname, where))
    return rejmsg
514 ################################################################################
516 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
    """Convert the Files-style field of a parsed control dict into a
    per-filename dict of entries.

    NOTE(review): this view of the function is incomplete; the comments
    below describe only the visible code.
    """
    # Make sure we have a Files: field to parse...
    if not changes.has_key(field):
        raise NoFilesFieldError

    # Make sure we recognise the format of the Files: field
    format = re_verwithext.search(changes.get("format", "0.0"))
    raise UnknownFormatError, "%s" % (changes.get("format","0.0"))

    # Normalise the matched version into an (int, int, extension) tuple.
    format = format.groups()
    if format[1] == None:
        format = int(float(format[0])), 0, format[2]
        format = int(format[0]), int(format[1]), format[2]
    if format[2] == None:

    # format = (0,0) are missing format headers of which we still
    # have some in the archive.
    if format != (1,0) and format != (0,0) and \
       format != (3,0,"quilt") and format != (3,0,"native"):
        raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
    if (format < (1,5) or format > (1,8)):
        raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
    if field != "files" and format < (1,8):
        raise UnknownFormatError, "%s" % (changes.get("format","0.0"))

    # Only .changes "files" entries carry section/priority columns.
    includes_section = (not is_a_dsc) and field == "files"

    # Parse each entry/line:
    for i in changes[field].split('\n'):
        section = priority = ""
        (md5, size, section, priority, name) = s
        (md5, size, name) = s
        raise ParseChangesError, i
        (section, component) = extract_component_from_section(section)
        files[name] = Dict(size=size, section=section,
                           priority=priority, component=component)
        files[name][hashname] = md5
579 ################################################################################
def send_mail (message, filename=""):
    """sendmail wrapper, takes _either_ a message string or a file as arguments"""
    # NOTE(review): this view of the function is incomplete; the comments
    # below describe only the visible code.

    # If we've been passed a string dump it into a temporary file
    (fd, filename) = tempfile.mkstemp()
    os.write (fd, message)

    # Optionally filter recipients against Dinstall::MailWhiteList.
    if Cnf.has_key("Dinstall::MailWhiteList") and \
       Cnf["Dinstall::MailWhiteList"] != "":
        message_in = open_file(filename)
        message_raw = modemail.message_from_file(message_in)

        whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
        # Whitelist entries are either regex lines (RE: marker) or
        # literal addresses (escaped into a regex).
        for line in whitelist_in:
            if not re_whitespace_comment.match(line):
                if re_re_mark.match(line):
                    whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
                whitelist.append(re.compile(re.escape(line.strip())))

        # Check which recipient headers survive the whitelist.
        fields = ["To", "Bcc", "Cc"]
        value = message_raw.get(field, None)
        for item in value.split(","):
            (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
        if not mail_whitelisted:
            print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])

        # Doesn't have any mail in whitelist so remove the header
        del message_raw[field]
        message_raw.replace_header(field, string.join(match, ", "))

        # Change message fields in order if we don't have a To header
        if not message_raw.has_key("To"):
            if message_raw.has_key(field):
                message_raw[fields[-1]] = message_raw[field]
                del message_raw[field]

        # Clean up any temporary files
        # and return, as we removed all recipients.
        os.unlink (filename);

        # Rewrite the temporary file with the filtered message.
        fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
        os.write (fd, message_raw.as_string(True));

    # Invoke the configured sendmail command on the (possibly rewritten) file.
    (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
    raise SendmailFailedError, output

    # Clean up any temporary files
def poolify (source, component):
    """Return the pool subdirectory for C{source} within C{component}.

    E.g. poolify("glibc", "main") -> "main/g/glibc/" and
    poolify("libfoo", "main") -> "main/libf/libfoo/".
    """
    # BUGFIX: a non-empty component needs a trailing '/' separator;
    # concatenating it directly yields e.g. "maing/glibc/".
    if component:
        component += '/'
    # lib* sources are binned by their first four characters,
    # everything else by the first character.
    if source[:3] == "lib":
        return component + source[:4] + '/' + source + '/'
    else:
        return component + source[:1] + '/' + source + '/'
671 ################################################################################
def move (src, dest, overwrite = 0, perms = 0664):
    """Move C{src} to C{dest} (copy2 + chmod), creating directories as
    needed.

    NOTE(review): several branches (dest-is-dir handling, umask restore,
    the overwrite guard and the final unlink of src) are elided in this
    view; comments cover only the visible code.
    """
    if os.path.exists(dest) and os.path.isdir(dest):
    dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        # Create missing target directories setgid/group-writable with a
        # clean umask so pool permissions are consistent.
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Moving %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
        fubar("Can't move %s to %s - file already exists." % (src, dest))
        if not os.access(dest, os.W_OK):
            fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
def copy (src, dest, overwrite = 0, perms = 0664):
    """Copy C{src} to C{dest} (copy2 + chmod), creating directories as
    needed; raises instead of aborting on conflicts.

    NOTE(review): several branches (dest-is-dir handling, umask restore,
    the overwrite guard) are elided in this view; comments cover only the
    visible code.
    """
    if os.path.exists(dest) and os.path.isdir(dest):
    dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        # Create missing target directories setgid/group-writable with a
        # clean umask so pool permissions are consistent.
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Copying %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
        raise FileExistsError
        if not os.access(dest, os.W_OK):
            raise CantOverwriteError
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
718 ################################################################################
721 res = socket.gethostbyaddr(socket.gethostname())
722 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
723 if database_hostname:
724 return database_hostname
def which_conf_file ():
    """Return the dak configuration file to use on this host.

    A host-specific Config::<host>::DakConfig entry overrides the
    compiled-in default; an optional per-user config may be pre-loaded.
    """
    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # BUGFIX: os.path.join discards everything before an absolute
        # component, so joining with "/etc/dak.conf" always produced
        # plain "/etc/dak.conf" instead of a path under $HOME.
        confpath = os.path.join(homedir, "etc/dak.conf")
        if os.path.exists(confpath):
            # BUGFIX: read the user's local config, not default_config.
            apt_pkg.ReadConfigFileISC(Cnf, confpath)

    # We are still in here, so there is no local config file or we do
    # not allow local files. Do the normal stuff.
    if Cnf.get("Config::" + res[0] + "::DakConfig"):
        return Cnf["Config::" + res[0] + "::DakConfig"]
    else:
        return default_config
def which_apt_conf_file ():
    """Return the apt configuration file to use on this host.

    A host-specific Config::<host>::AptConfig entry overrides the
    compiled-in default; an optional per-user config may be pre-loaded.
    """
    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # BUGFIX: os.path.join discards everything before an absolute
        # component, so joining with "/etc/dak.conf" always produced
        # plain "/etc/dak.conf" instead of a path under $HOME.
        confpath = os.path.join(homedir, "etc/dak.conf")
        if os.path.exists(confpath):
            # BUGFIX: read the user's local config, not default_config.
            apt_pkg.ReadConfigFileISC(Cnf, confpath)

    if Cnf.get("Config::" + res[0] + "::AptConfig"):
        return Cnf["Config::" + res[0] + "::AptConfig"]
    else:
        return default_apt_config
def which_alias_file():
    """Return this host's forward-alias file, if it exists."""
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
    # NOTE(review): the return statements are elided in this view;
    # presumably aliasfn is returned when it exists, None otherwise.
    if os.path.exists(aliasfn):
766 ################################################################################
def TemplateSubst(map, filename):
    """ Perform a substition of template """
    # NOTE: the 'map' parameter shadows the builtin; kept for interface
    # compatibility with existing callers.  Keys found in the template
    # text are replaced by their (stringified) values.
    templatefile = open_file(filename)
    template = templatefile.read()
    # NOTE(review): the loop over map's keys and the return are elided in
    # this view.
    template = template.replace(x, str(map[x]))
777 ################################################################################
def fubar(msg, exit_code=1):
    """Print a fatal error to stderr and terminate the program.

    @param msg: message text (printed with an "E: " prefix)
    @param exit_code: process exit status (default 1)
    """
    sys.stderr.write("E: %s\n" % (msg))
    # A fatal error must actually stop the program.
    sys.exit(exit_code)
784 sys.stderr.write("W: %s\n" % (msg))
786 ################################################################################
788 # Returns the user name with a laughable attempt at rfc822 conformancy
789 # (read: removing stray periods).
791 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
794 return pwd.getpwuid(os.getuid())[0]
796 ################################################################################
806 return ("%d%s" % (c, t))
808 ################################################################################
def cc_fix_changes (changes):
    """Normalise the "architecture" field of a parsed changes dict from a
    whitespace-separated string into a dict keyed by architecture name.

    Modifies C{changes} in place.
    """
    o = changes.get("architecture", "")
    # BUGFIX: only delete the old string form when it is present; an
    # unconditional del raises KeyError for changes without the field.
    if o != "":
        del changes["architecture"]
    changes["architecture"] = {}
    for j in o.split():
        changes["architecture"][j] = 1
def changes_compare (a, b):
    """ Sort by source name, source version, 'have source', and then by filename """
    # NOTE(review): the try/except wrappers, the early returns on q != 0
    # and the final filename comparison are elided in this view.
    a_changes = parse_changes(a)
    b_changes = parse_changes(b)

    # Normalise the architecture fields into dicts.
    cc_fix_changes (a_changes)
    cc_fix_changes (b_changes)

    # Sort by source name
    a_source = a_changes.get("source")
    b_source = b_changes.get("source")
    q = cmp (a_source, b_source)

    # Sort by source version
    a_version = a_changes.get("version", "0")
    b_version = b_changes.get("version", "0")
    q = apt_pkg.VersionCompare(a_version, b_version)

    # Sort by 'have source'
    a_has_source = a_changes["architecture"].get("source")
    b_has_source = b_changes["architecture"].get("source")
    if a_has_source and not b_has_source:
    elif b_has_source and not a_has_source:

    # Fall back to sort by filename
858 ################################################################################
def find_next_free (dest, too_many=100):
    """Return C{dest} if it does not exist, else the first free name of
    the form C{dest}.N.

    @raise NoFreeFilenameError: when no free name is found within
        C{too_many} attempts.
    """
    extra = 0
    orig_dest = dest
    # Advance the suffix counter each round so the loop terminates and
    # tries dest.0, dest.1, ... in order.
    while os.path.exists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
        extra += 1
    if extra >= too_many:
        raise NoFreeFilenameError
    return dest
870 ################################################################################
def result_join (original, sep = '\t'):
    """Join the items of C{original} with C{sep}, rendering None as "".

    @param original: sequence of values (possibly containing None)
    @param sep: separator string (default TAB)
    @return: the joined string
    """
    resultlist = []
    # Iterate directly (the index loop used py2-only xrange); compare
    # against None with 'is', the idiomatic singleton test.
    for item in original:
        if item is None:
            resultlist.append("")
        else:
            resultlist.append(item)
    return sep.join(resultlist)
881 ################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    """Prefix every line of C{str} with C{prefix}; blank lines are
    dropped unless C{include_blank_lines} is true.

    ('str' shadows the builtin name but is kept for interface
    compatibility with existing callers.)
    """
    out = []
    for line in str.split('\n'):
        if line or include_blank_lines:
            out.append("%s%s" % (prefix, line))
    # Joining (instead of repeated +=) leaves no trailing newline to strip.
    return "\n".join(out)
894 ################################################################################
def validate_changes_file_arg(filename, require_changes=1):
    """
    'filename' is either a .changes or .dak file. If 'filename' is a
    .dak file, it's changed to be the corresponding .changes file. The
    function then checks if the .changes file a) exists and b) is
    readable and returns the .changes filename if so. If there's a
    problem, the next action depends on the option 'require_changes'
    argument:

      - If 'require_changes' == -1, errors are ignored and the .changes
        filename is returned.
      - If 'require_changes' == 0, a warning is given and 'None' is returned.
      - If 'require_changes' == 1, a fatal error is raised.

    NOTE(review): parts of the error-dispatch tail are elided in this view.
    """
    orig_filename = filename
    if filename.endswith(".dak"):
        filename = filename[:-4]+".changes"

    if not filename.endswith(".changes"):
        error = "invalid file type; not a changes file"
    if not os.access(filename,os.R_OK):
        if os.path.exists(filename):
            error = "permission denied"
        error = "file not found"
    if require_changes == 1:
        fubar("%s: %s." % (orig_filename, error))
    elif require_changes == 0:
        warn("Skipping %s - %s" % (orig_filename, error))
    else: # We only care about the .dak file
937 ################################################################################
940 return (arch != "source" and arch != "all")
942 ################################################################################
def join_with_commas_and(list):
    """Format a sequence as English prose, e.g. ['a','b','c'] -> "a, b and c".

    ('list' shadows the builtin name but is kept for interface
    compatibility with existing callers.)
    """
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    all_but_last = ", ".join(list[:-1])
    return "%s and %s" % (all_but_last, list[-1])
949 ################################################################################
954 (pkg, version, constraint) = atom
956 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
959 pp_deps.append(pp_dep)
960 return " |".join(pp_deps)
962 ################################################################################
967 ################################################################################
def parse_args(Options):
    """ Handle -a, -c and -s arguments; returns them as SQL constraints """
    # XXX: This should go away and everything which calls it be converted
    # to use SQLA properly. For now, we'll just fix it not to use
    # the old Pg interface though
    session = DBConn().session()
    # NOTE(review): several initialisations and else-branches are elided
    # in this view; comments cover only the visible code.

    # Process suite
    for suitename in split_args(Options["Suite"]):
        suite = get_suite(suitename, session=session)
        if suite.suite_id is None:
            warn("suite '%s' not recognised." % (suite.suite_name))
        suite_ids_list.append(suite.suite_id)
    con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
    fubar("No valid suite given.")

    # Process component
    if Options["Component"]:
        component_ids_list = []
        for componentname in split_args(Options["Component"]):
            component = get_component(componentname, session=session)
            if component is None:
                warn("component '%s' not recognised." % (componentname))
            component_ids_list.append(component.component_id)
        if component_ids_list:
            con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
        fubar("No valid component given.")

    # Process architecture
    con_architectures = ""
    if Options["Architecture"]:
        for archname in split_args(Options["Architecture"]):
            # "source" is handled separately via check_source.
            if archname == "source":
            arch = get_architecture(archname, session=session)
            warn("architecture '%s' not recognised." % (archname))
            arch_ids_list.append(arch.arch_id)
        con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
        if not check_source:
            fubar("No valid architecture given.")

    return (con_suites, con_architectures, con_components, check_source)
1031 ################################################################################
1033 # Inspired(tm) by Bryn Keller's print_exc_plus (See
1034 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
1037 tb = sys.exc_info()[2]
1044 frame = frame.f_back
1046 traceback.print_exc()
1048 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
1049 frame.f_code.co_filename,
1051 for key, value in frame.f_locals.items():
1052 print "\t%20s = " % key,
1056 print "<unable to print>"
1058 ################################################################################
1060 def try_with_debug(function):
1068 ################################################################################
def arch_compare_sw (a, b):
    """
    Function for use in sorting lists of architectures.

    Sorts normally except that 'source' dominates all others.

    NOTE(review): the remaining comparison branches are elided in this
    view.
    """
    if a == "source" and b == "source":
1086 ################################################################################
def split_args (s, dwim=1):
    """
    Split command line arguments which can be separated by either commas
    or whitespace. If dwim is set, it will complain about string ending
    in comma since this usually means someone did 'dak ls -a i386, m68k
    foo' or something and the inevitable confusion resulting from 'm68k'
    being treated as an argument is undesirable.
    """
    if s.find(",") == -1:
        # No commas at all: a plain whitespace-separated list.
        return s.split()
    else:
        if s[-1:] == "," and dwim:
            fubar("split_args: found trailing comma, spurious space maybe?")
        return s.split(",")
1104 ################################################################################
def Dict(**kwargs):
    """Convenience wrapper: return the keyword arguments as a plain dict."""
    return kwargs
1108 ########################################
def gpgv_get_status_output(cmd, status_read, status_write):
    """
    Our very own version of commands.getouputstatus(), hacked to support
    gpgv's status fd.

    NOTE(review): the fork, child-side fd bookkeeping and the read loop
    are partially elided in this view; comments cover only the visible
    code.
    """
    cmd = ['/bin/sh', '-c', cmd]
    p2cread, p2cwrite = os.pipe()
    c2pread, c2pwrite = os.pipe()
    errout, errin = os.pipe()
    # Child: close every inherited fd except the status pipe before exec.
    for i in range(3, 256):
        if i != status_write:
    os.execvp(cmd[0], cmd)
    # Parent: keep only its ends of the pipes.
    os.dup2(c2pread, c2pwrite)
    os.dup2(errout, errin)

    output = status = ""
    # Multiplex child stdout/stderr and the status fd until all report EOF.
    i, o, e = select.select([c2pwrite, errin, status_read], [], [])
    r = os.read(fd, 8196)
    more_data.append(fd)
    if fd == c2pwrite or fd == errin:
    elif fd == status_read:
    fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))

    # Reap the child and release the status pipe.
    pid, exit_status = os.waitpid(pid, 0)
    os.close(status_write)
    os.close(status_read)
    return output, status, exit_status
1175 ################################################################################
def process_gpgv_output(status):
    """Parse gpgv's --status-fd output into a keyword dict.

    @param status: raw status-fd text produced by gpgv
    @return: tuple (keywords, internal_error); keywords maps each status
        token to its argument list, internal_error accumulates messages
        about malformed status lines ("" when none).
    """
    # Process the status-fd output
    keywords = {}
    internal_error = ""
    for line in status.split('\n'):
        line = line.strip()
        if line == "":
            continue
        split = line.split()
        if len(split) < 2:
            internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
            continue
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
            continue
        args = split[2:]
        # 'in' instead of has_key(): works on Python 2 and 3.  Some
        # tokens may legitimately repeat and are exempted here.
        if keyword in keywords and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
            continue
        else:
            keywords[keyword] = args

    return (keywords, internal_error)
1202 ################################################################################
def retrieve_key (filename, keyserver=None, keyring=None):
    """
    Retrieve the key that signed 'filename' from 'keyserver' and
    add it to 'keyring'. Returns nothing on success, or an error message
    on failure.

    NOTE(review): the guards around the default assignments and parts of
    the error handling are elided in this view; comments cover only the
    visible code.
    """
    # Defaults for keyserver and keyring
    keyserver = Cnf["Dinstall::KeyServer"]
    keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(filename):
        return "%s: tainted filename" % (filename)

    # Invoke gpgv on the file
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
    (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)
    return internal_error

    # With an empty keyring, a missing key shows up as NO_PUBKEY with the
    # fingerprint we need to fetch.
    if not keywords.has_key("NO_PUBKEY"):
        return "didn't find expected NO_PUBKEY in gpgv status-fd output"

    fingerprint = keywords["NO_PUBKEY"][0]
    # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
    # it'll try to create a lockfile in /dev. A better solution might
    # be a tempfile or something.
    cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
          % (Cnf["Dinstall::SigningKeyring"])
    cmd += " --keyring %s --keyserver %s --recv-key %s" \
           % (keyring, keyserver, fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    return "'%s' failed with exit code %s" % (cmd, result)
1248 ################################################################################
def gpg_keyring_args(keyrings=None):
    """Build the --keyring argument string for gpg/gpgv invocations.

    @param keyrings: list of keyring paths; defaults to the configured
        Dinstall::GPGKeyring list when empty/None.
    @return: space-separated "--keyring <path>" options
    """
    # BUGFIX: only fall back to the configured keyrings when the caller
    # did not supply any; the previous code clobbered the argument
    # unconditionally, making the parameter useless.
    if not keyrings:
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    return " ".join(["--keyring %s" % x for x in keyrings])
1256 ################################################################################
def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None):
    """
    Check the signature of a file and return a tuple (fingerprint, rejects).
    'fingerprint' is the signing key's fingerprint if the signature is
    valid, or None if it's not; 'rejects' is a list of human-readable
    error messages (empty on success).

    The first argument is the filename whose signature should be checked.
    The second argument is optional and is the name of the file the
    detached signature applies to.  The third argument is optional and is
    a *list* of keyrings to use.  'autofetch' can either be None, True or
    False.  If None, the default behaviour specified in the config
    (Dinstall::KeyAutoFetch) will be used.
    """
    rejects = []

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(sig_filename):
        rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
        return (None, rejects)

    if data_filename and not re_taint_free.match(data_filename):
        rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
        return (None, rejects)

    if not keyrings:
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    # Autofetch the signing key if that's enabled
    if autofetch == None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    if autofetch:
        error_msg = retrieve_key(sig_filename)
        if error_msg:
            rejects.append(error_msg)
            return (None, rejects)

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    if internal_error:
        rejects.append("internal error while performing signature check on %s." % (sig_filename))
        # NOTE: list.append takes one argument; the old reject(msg, prefix)
        # two-argument calls were a TypeError waiting to happen.
        rejects.append(internal_error)
        rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.")
        return (None, rejects)

    # Now check for obviously bad things in the processed output
    if "KEYREVOKED" in keywords:
        rejects.append("The key used to sign %s has been revoked." % (sig_filename))
    if "BADSIG" in keywords:
        rejects.append("bad signature on %s." % (sig_filename))
    if "ERRSIG" in keywords and "NO_PUBKEY" not in keywords:
        rejects.append("failed to check signature on %s." % (sig_filename))
    if "NO_PUBKEY" in keywords:
        args = keywords["NO_PUBKEY"]
        key = "UNKNOWN"
        if len(args) >= 1:
            key = args[0]
        rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if "BADARMOR" in keywords:
        rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if "NODATA" in keywords:
        rejects.append("no signature found in %s." % (sig_filename))
    if "EXPKEYSIG" in keywords:
        args = keywords["EXPKEYSIG"]
        key = "UNKNOWN"
        if len(args) >= 1:
            key = args[0]
        rejects.append("Signature made by expired key 0x%s" % (key))
    if "KEYEXPIRED" in keywords and "GOODSIG" not in keywords:
        args = keywords["KEYEXPIRED"]
        expiredate = ""
        if len(args) >= 1:
            timestamp = args[0]
            # gpgv emits either an epoch timestamp or an ISO 8601 date
            # (containing a 'T'); render epochs as YYYY-MM-DD.
            if timestamp.count("T") == 0:
                try:
                    expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
                except ValueError:
                    expiredate = "unknown (%s)" % (timestamp)
            else:
                expiredate = timestamp
        rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))

    if len(rejects) > 0:
        return (None, rejects)

    # Next check gpgv exited with a zero return code
    if exit_status:
        rejects.append("gpgv failed while checking %s." % (sig_filename))
        if status.strip():
            rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "))
        else:
            rejects.append(prefix_multi_line_string(output, " [GPG output:] "))
        return (None, rejects)

    # Sanity check the good stuff we expect
    fingerprint = None
    if "VALIDSIG" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
    else:
        args = keywords["VALIDSIG"]
        if len(args) < 1:
            rejects.append("internal error while checking signature on %s." % (sig_filename))
        else:
            fingerprint = args[0]
    if "GOODSIG" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if "SIG_ID" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

    # Finally ensure there's not something we don't recognise
    known_keywords = dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
                          NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")

    for keyword in keywords.keys():
        if keyword not in known_keywords:
            rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))

    if len(rejects) > 0:
        return (None, rejects)

    return (fingerprint, [])
1390 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """retrieve email addresses from gpg key uids for a given fingerprint"""
    # Serve from the module-level cache when we've seen this key before.
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses != None:
        return addresses
    addresses = set()
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
               % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    # Only parse uid lines when gpg succeeded; a failure caches the
    # empty set so we don't re-run gpg for an unknown fingerprint.
    if result == 0:
        for l in output.split('\n'):
            m = re_gpg_uid.match(l)
            if m:
                addresses.add(m.group(1))
    key_uid_email_cache[fingerprint] = addresses
    return addresses
1409 ################################################################################
1411 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """
    Greedy word-wrap 'paragraph' to lines of at most 'max_length'
    characters, prefixing every continuation line with 'prefix'.
    Words longer than 'max_length' are emitted on a line of their own.

    @type paragraph: string
    @param paragraph: text to wrap (split on whitespace)

    @type max_length: int
    @param max_length: maximum line length

    @type prefix: string
    @param prefix: string prepended to each line after the first

    @rtype: string
    @return: the wrapped text
    """
    s = ""
    line = ""
    have_started = False
    words = paragraph.split()

    for word in words:
        word_size = len(word)
        if word_size > max_length:
            # Flush any pending line, then put the oversized word on its
            # own line.  Reset state so the next word starts a fresh line
            # (the old code left 'line' stale/unbound here).
            if have_started:
                s += line + '\n' + prefix
            s += word + '\n' + prefix
            line = ""
            have_started = False
        else:
            if have_started:
                new_length = len(line) + word_size + 1
                if new_length > max_length:
                    s += line + '\n' + prefix
                    line = word
                else:
                    line += ' ' + word
            else:
                line = word
                have_started = True

    if have_started:
        s += line

    return s
1442 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
    Returns fixed 'src'.
    """
    # Strip the leading 'root' component from both paths (first match only).
    stripped_src = src.replace(root, '', 1)
    dest_dir = os.path.dirname(dest.replace(root, '', 1))
    # One "../" per path component of the destination's directory.
    depth = len(dest_dir.split('/'))
    return ('../' * depth) + stripped_src
1455 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.

    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pair (fd, name).
    """
    # Delegate to tempfile.mkstemp, which creates the file atomically.
    (tfd, tname) = tempfile.mkstemp(suffix, prefix, directory)
    return (tfd, tname)
1469 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Return a secure and unique directory by pre-creating it.

    If 'parent' is non-null, it will be the directory the directory is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pathname to the new directory.
    """
    # Delegate to tempfile.mkdtemp, which creates the directory atomically.
    created = tempfile.mkdtemp(suffix, prefix, parent)
    return created
1483 ################################################################################
def is_email_alias(email):
    """ checks if the user part of the email is listed in the alias file """
    global alias_cache
    # Lazily populate the module-level cache on first use; the old code
    # called .add() while alias_cache was still None.
    if alias_cache == None:
        aliasfn = which_alias_file()
        alias_cache = set()
        if aliasfn:
            f = open(aliasfn)
            try:
                # Alias file lines look like "name: target..."; the part
                # before the first colon is the alias name.
                for l in f:
                    alias_cache.add(l.split(':')[0])
            finally:
                # Close the file even on a read error (old code leaked it).
                f.close()
    uid = email.split('@')[0]
    return uid in alias_cache
1497 ################################################################################
def get_changes_files(dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place).

    Returns a list of filenames.
    """
    try:
        # Much of the rest of p-u/p-a depends on being in the right place
        os.chdir(dir)
        changes_files = [x for x in os.listdir(dir) if x.endswith('.changes')]
    except OSError as e:
        fubar("Failed to read list from directory %s (%s)" % (dir, e))

    return changes_files
1516 ################################################################################
# Module initialisation: build the global apt_pkg configuration object
# 'Cnf' and load the default dak configuration into it.
Cnf = apt_pkg.newConfiguration()
apt_pkg.ReadConfigFileISC(Cnf,default_config)
# Overlay the site-specific configuration when it differs from the
# default (which_conf_file() is presumably defined elsewhere in this
# file and consults the environment -- TODO confirm).
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1526 ###############################################################################