2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
39 import email as modemail
42 from dbconn import DBConn, get_architecture, get_component, get_suite
43 from dak_exceptions import *
44 from textutils import fix_maintainer
45 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
46 re_multi_line_field, re_srchasver, re_taint_free, \
47 re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource, \
50 from formats import parse_format, validate_changes_format
51 from srcformats import get_format_from_string
52 from collections import defaultdict
54 ################################################################################
56 default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
57 default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used
59 alias_cache = None #: Cache for email alias checks
60 key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
62 # (hashname, function, earliest_changes_version)
63 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
64 ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
66 # Monkeypatch commands.getstatusoutput as it may not return the correct exit
67 # code in lenny's Python. This also affects commands.getoutput and
def dak_getstatusoutput(cmd):
    """
    Run C{cmd} through the shell and return its combined stdout/stderr
    output plus exit status, replacing commands.getstatusoutput().

    NOTE(review): this view of the file is elided; the lines computing the
    exit status and stripping the trailing newline are not visible here.
    """
    pipe = subprocess.Popen(cmd, shell=True, universal_newlines=True,
                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

    # stderr is folded into stdout by the Popen call above.
    output = "".join(pipe.stdout.readlines())

    if output[-1:] == '\n':

# Monkeypatch the stdlib function (see comment above): lenny's Python
# returned the wrong exit code from commands.getstatusoutput.
commands.getstatusoutput = dak_getstatusoutput
85 ################################################################################
88 """ Escape html chars """
89 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
91 ################################################################################
def open_file(filename, mode='r'):
    """
    Open C{file}, return fileobject.

    @type filename: string
    @param filename: path/filename to open

    @type mode: string
    @param mode: open mode

    @rtype: file
    @return: open fileobject

    @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
    """
    # NOTE(review): the try/except IOError wrapper around these two lines is
    # elided from this view of the file.
    f = open(filename, mode)
    raise CantOpenError, filename
115 ################################################################################
def our_raw_input(prompt=""):
    # Prompt on stdout and read one line from the user; EOF (^D) is treated
    # as a user abort (the surrounding try/except is elided from this view).
    sys.stdout.write(prompt)
    sys.stderr.write("\nUser interrupt (^D).\n")
133 ################################################################################
def extract_component_from_section(section):
    # Split a "component/section" value and return (section, component);
    # the branches defaulting the component are elided from this view.
    if section.find('/') != -1:
        component = section.split('/')[0]

    # Expand default component
    if Cnf.has_key("Component::%s" % section):

    return (section, component)
150 ################################################################################
def parse_deb822(contents, signing_rules=0):
    """
    Parse a deb822-style control blob (e.g. a .changes or .dsc) into a
    field -> value dict; signing_rules controls how strictly PGP armor
    around the data is enforced (see parse_changes for the values).

    NOTE(review): this view of the file is heavily elided; loop/exception
    scaffolding between the visible lines is missing, so nesting below is
    a best-effort reconstruction.
    """
    # Split the lines in the input, keeping the linebreaks.
    lines = contents.splitlines(True)
    raise ParseChangesError, "[Empty changes file]"

    # Reindex by line number so we can easily verify the format of
    indexed_lines[index] = line[:-1]

    num_of_lines = len(indexed_lines.keys())
    while index < num_of_lines:
        line = indexed_lines[index]
        if signing_rules == 1:
            # Strict mode: the signature block must follow immediately.
            if index > num_of_lines:
                raise InvalidDscError, index
            line = indexed_lines[index]
            if not line.startswith("-----BEGIN PGP SIGNATURE"):
                raise InvalidDscError, index
        if line.startswith("-----BEGIN PGP SIGNATURE"):
        if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
            if signing_rules == 1:
                # Skip the PGP header block up to the first blank line.
                while index < num_of_lines and line != "":
                    line = indexed_lines[index]
        # If we're not inside the signed data, don't process anything
        if signing_rules >= 0 and not inside_signature:
        slf = re_single_line_field.match(line)
        field = slf.groups()[0].lower()
        changes[field] = slf.groups()[1]
        changes[field] += '\n'
        mlf = re_multi_line_field.match(line)
        raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
        if first == 1 and changes[field] != "":
            changes[field] += '\n'
        changes[field] += mlf.groups()[0] + '\n'

    if signing_rules == 1 and inside_signature:
        raise InvalidDscError, index

    changes["filecontents"] = "".join(lines)

    if changes.has_key("source"):
        # Strip the source version in brackets from the source field,
        # put it in the "source-version" field instead.
        srcver = re_srchasver.search(changes["source"])
        changes["source"] = srcver.group(1)
        changes["source-version"] = srcver.group(2)

    raise ParseChangesError, error
240 ################################################################################
def parse_changes(filename, signing_rules=0):
    """
    Parses a changes file and returns a dictionary where each field is a
    key. The mandatory first argument is the filename of the .changes
    file.

    signing_rules is an optional argument:

    - If signing_rules == -1, no signature is required.
    - If signing_rules == 0 (the default), a signature is required.
    - If signing_rules == 1, it turns on the same strict format checking
      as for .dsc files.

    The rules for (signing_rules == 1)-mode are:

    - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
      followed by any PGP header data and must end with a blank line.

    - The data section must end with a blank line and must be followed by
      "-----BEGIN PGP SIGNATURE-----".

    @raise ChangesUnicodeError: if the file content is not valid UTF-8
    @raise ParseChangesError: if a mandatory field (policy 5.5) is missing
    """
    changes_in = open_file(filename)
    content = changes_in.read()
    # Validate encoding before parsing (the try/except is elided here).
    unicode(content, 'utf-8')
    raise ChangesUnicodeError, "Changes file not proper utf-8"
    changes = parse_deb822(content, signing_rules)

    # Finally ensure that everything needed is there
    must_keywords = ('Format', 'Date', 'Source', 'Binary', 'Architecture', 'Version',
                     'Distribution', 'Maintainer', 'Description', 'Changes', 'Files')

    for keyword in must_keywords:
        if not changes.has_key(keyword.lower()):
            missingfields.append(keyword)

    if len(missingfields):
        raise ParseChangesError, "Missing mandantory field(s) in changes file (policy 5.5): %s" % (missingfields)
287 ################################################################################
def hash_key(hashname):
    """Return the files-dict key for a checksum name, e.g. "sha1" -> "sha1sum"."""
    key = '%ssum' % (hashname,)
    return key
292 ################################################################################
def create_hash(where, files, hashname, hashfunc):
    """
    create_hash extends the passed files dict with the given hash by
    iterating over all files on disk and passing them to the hashing
    function given; returns a list of reject messages.
    """
    for f in files.keys():
        # Unreadable files are reported rather than raising.
        file_handle = open_file(f)
        except CantOpenError:
            rejmsg.append("Could not open file %s for checksumming" % (f))

        files[f][hash_key(hashname)] = hashfunc(file_handle)
314 ################################################################################
def check_hash(where, files, hashname, hashfunc):
    """
    check_hash checks the given hash in the files dict against the actual
    files on disk. The hash values need to be present consistently in
    all file entries. It does not modify its input in any way.
    """
    for f in files.keys():
        file_handle = open_file(f)

        # Check for the hash entry, to not trigger a KeyError.
        if not files[f].has_key(hash_key(hashname)):
            rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,

        # Actually check the hash for correctness.
        if hashfunc(file_handle) != files[f][hash_key(hashname)]:
            rejmsg.append("%s: %s check failed in %s" % (f, hashname,
        except CantOpenError:
        # TODO: This happens when the file is in the pool.
        # warn("Cannot open file %s" % f)
349 ################################################################################
def check_size(where, files):
    """
    check_size checks the file sizes in the passed files dict against the
    sizes of the files on disk; returns a list of reject messages.
    """
    for f in files.keys():
        # TODO: This happens when the file is in the pool.
        actual_size = entry[stat.ST_SIZE]
        size = int(files[f]["size"])
        if size != actual_size:
            rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
                          % (f, actual_size, size, where))
374 ################################################################################
def check_dsc_files(dsc_filename, dsc=None, dsc_files=None):
    """
    Verify that the files listed in the Files field of the .dsc are
    those expected given the announced Format.

    @type dsc_filename: string
    @param dsc_filename: path of .dsc file

    @type dsc: dict
    @param dsc: the content of the .dsc parsed by C{parse_changes()}

    @type dsc_files: dict
    @param dsc_files: the file list returned by C{build_file_list()}

    @rtype: list
    @return: all errors detected
    """
    # Parse the file if needed
    dsc = parse_changes(dsc_filename, signing_rules=1);

    if dsc_files is None:
        dsc_files = build_file_list(dsc, is_a_dsc=1)

    # Ensure .dsc lists proper set of source files according to the format
    has = defaultdict(lambda: 0)

    # Lookup table: filename-type regex -> keys to bump in 'has'.
        (r'orig.tar.gz',               ('orig_tar_gz', 'orig_tar')),
        (r'diff.gz',                   ('debian_diff',)),
        (r'tar.gz',                    ('native_tar_gz', 'native_tar')),
        (r'debian\.tar\.(gz|bz2)',     ('debian_tar',)),
        (r'orig\.tar\.(gz|bz2)',       ('orig_tar',)),
        (r'tar\.(gz|bz2)',             ('native_tar',)),
        (r'orig-.+\.tar\.(gz|bz2)',    ('more_orig_tar',)),

    for f in dsc_files.keys():
        m = re_issource.match(f)
        rejmsg.append("%s: %s in Files field not recognised as source."

        # Populate 'has' dictionary by resolving keys in lookup table
        for regex, keys in ftype_lookup:
            if re.match(regex, m.group(3)):

        # File does not match anything in lookup table; reject
        # NOTE(review): 'reject' here is inconsistent with the rejmsg.append
        # style used everywhere else in this function — confirm it exists.
        reject("%s: unexpected source file '%s'" % (dsc_filename, f))

    # Check for multiple files
    for file_type in ('orig_tar', 'native_tar', 'debian_tar', 'debian_diff'):
        if has[file_type] > 1:
            rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))

    # Source format specific tests
    format = get_format_from_string(dsc['format'])
        '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)

    except UnknownFormatError:
        # Not an error here for now
454 ################################################################################
def check_hash_fields(what, manifest):
    """
    check_hash_fields ensures that there are no checksum fields in the
    given dict that we do not know about; returns a list of reject
    messages for any unsupported "checksums-*" field found.
    """
    hashes = map(lambda x: x[0], known_hashes)
    for field in manifest:
        if field.startswith("checksums-"):
            hashname = field.split("-",1)[1]
            if hashname not in hashes:
                rejmsg.append("Unsupported checksum field for %s "\
                    "in %s" % (hashname, what))
472 ################################################################################
def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
    # If the .changes Format is new enough the named hash must be present
    # in the manifest; otherwise it has to be computed from disk.
    if format >= version:
        # The version should contain the specified hash.

        # Import hashes from the changes
        rejmsg = parse_checksums(".changes", files, changes, hashname)

        # We need to calculate the hash because it can't possibly
        # NOTE(review): 'func' is bound in lines elided from this view —
        # presumably create_hash vs check_hash depending on format; confirm.
        return func(".changes", files, hashname, hashfunc)
489 # We could add the orig which might be in the pool to the files dict to
490 # access the checksums easily.
def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
    """
    ensure_dsc_hashes' task is to ensure that each and every *present* hash
    in the dsc is correct, i.e. identical to the changes file and if necessary
    the pool. The latter task is delegated to check_hash.
    """
    # Absent checksum fields are simply skipped (early-return line elided).
    if not dsc.has_key('Checksums-%s' % (hashname,)):

    # Import hashes from the dsc
    parse_checksums(".dsc", dsc_files, dsc, hashname)

    rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
508 ################################################################################
def parse_checksums(where, files, manifest, hashname):
    # Copy the named checksums out of 'manifest' into the files dict,
    # cross-checking the sizes; returns a list of reject messages.
    field = 'checksums-%s' % hashname
    if not field in manifest:
    for line in manifest[field].split('\n'):
        clist = line.strip().split(' ')
        checksum, size, checkfile = clist
        rejmsg.append("Cannot parse checksum line [%s]" % (line))
        if not files.has_key(checkfile):
            # TODO: check for the file's entry in the original files dict, not
            # the one modified by (auto)byhand and other weird stuff
            # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
            #               (file, hashname, where))
        if not files[checkfile]["size"] == size:
            rejmsg.append("%s: size differs for files and checksums-%s entry "\
                "in %s" % (checkfile, hashname, where))
        files[checkfile][hash_key(hashname)] = checksum
    for f in files.keys():
        if not files[f].has_key(hash_key(hashname)):
            # NOTE(review): this reports 'checkfile' (leaked from the loop
            # above) rather than 'f', so the wrong filename is named in the
            # message — looks like a bug; confirm and change to 'f'.
            rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
541 ################################################################################
543 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
    # Parse the Files (or Checksums-*) field of a parsed changes/dsc dict
    # into a per-filename dict of size/section/priority/component plus the
    # given hash; raises NoFilesFieldError / ParseChangesError on bad input.

    # Make sure we have a Files: field to parse...
    if not changes.has_key(field):
        raise NoFilesFieldError

    # Validate .changes Format: field
    validate_changes_format(parse_format(changes['format']), field)

    includes_section = (not is_a_dsc) and field == "files"

    # Parse each entry/line:
    for i in changes[field].split('\n'):
        section = priority = ""
        # .changes entries carry section/priority, .dsc entries do not.
        (md5, size, section, priority, name) = s
        (md5, size, name) = s
        raise ParseChangesError, i

        (section, component) = extract_component_from_section(section)

        files[name] = dict(size=size, section=section,
                           priority=priority, component=component)
        files[name][hashname] = md5
585 ################################################################################
def send_mail (message, filename=""):
    """sendmail wrapper, takes _either_ a message string or a file as arguments

    NOTE(review): several control-flow lines of this function are elided
    from this view; the nesting below is a best-effort reconstruction.
    """
    # Check whether we're supposed to be sending mail
    if Cnf.has_key("Dinstall::Options::No-Mail") and Cnf["Dinstall::Options::No-Mail"]:

    # If we've been passed a string dump it into a temporary file
    (fd, filename) = tempfile.mkstemp()
    os.write (fd, message)

    # Optionally filter recipients against a whitelist of addresses/regexes.
    if Cnf.has_key("Dinstall::MailWhiteList") and \
       Cnf["Dinstall::MailWhiteList"] != "":
        message_in = open_file(filename)
        message_raw = modemail.message_from_file(message_in)

        whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
        for line in whitelist_in:
            if not re_whitespace_comment.match(line):
                if re_re_mark.match(line):
                    # Lines marked as regexes are compiled as-is...
                    whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
                    # ...plain lines are matched literally.
                    whitelist.append(re.compile(re.escape(line.strip())))

        # Walk the recipient headers and keep only whitelisted addresses.
        fields = ["To", "Bcc", "Cc"]
        value = message_raw.get(field, None)
        for item in value.split(","):
            (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
            if not mail_whitelisted:
                print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])

        # Doesn't have any mail in whitelist so remove the header
        del message_raw[field]
        message_raw.replace_header(field, ', '.join(match))

        # Change message fields in order if we don't have a To header
        if not message_raw.has_key("To"):
            if message_raw.has_key(field):
                message_raw[fields[-1]] = message_raw[field]
                del message_raw[field]

        # Clean up any temporary files
        # and return, as we removed all recipients.
        os.unlink (filename);

        # Rewrite the (filtered) message back into the temporary file.
        fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
        os.write (fd, message_raw.as_string(True));

    # Invoke sendmail; raises on non-zero exit (check line elided).
    (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
    raise SendmailFailedError, output

    # Clean up any temporary files
667 # Clean up any temporary files
671 ################################################################################
def poolify (source, component):
    # Map a source package to its pool subdirectory: "lib*" packages pool
    # under their first four characters, everything else under the first
    # one (the "component += '/'" normalisation is elided from this view).
    if source[:3] == "lib":
        return component + source[:4] + '/' + source + '/'
    return component + source[:1] + '/' + source + '/'
681 ################################################################################
def move (src, dest, overwrite = 0, perms = 0664):
    # Move 'src' to 'dest' (copy2 + chmod; the unlink of src is elided from
    # this view). Refuses to clobber existing files unless 'overwrite' is
    # set. 0664/02775 are Python 2 octal literals.
    if os.path.exists(dest) and os.path.isdir(dest):
    dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Moving %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
        fubar("Can't move %s to %s - file already exists." % (src, dest))
        if not os.access(dest, os.W_OK):
            fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
def copy (src, dest, overwrite = 0, perms = 0664):
    # Copy 'src' to 'dest' with mode 'perms'; same shape as move() above
    # but raises FileExistsError/CantOverwriteError instead of fubar()ing.
    if os.path.exists(dest) and os.path.isdir(dest):
    dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Copying %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
        raise FileExistsError
        if not os.access(dest, os.W_OK):
            raise CantOverwriteError
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
728 ################################################################################
731 res = socket.gethostbyaddr(socket.gethostname())
732 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
733 if database_hostname:
734 return database_hostname
def which_conf_file ():
    # Locate the dak config file: $DAK_CONFIG wins, then an optional
    # per-user config, then a per-host config, then the default.
    if os.getenv('DAK_CONFIG'):
        return os.getenv('DAK_CONFIG')

    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # NOTE(review): os.path.join discards 'homedir' because the second
        # argument is absolute — this always yields "/etc/dak.conf".
        # Probably meant "etc/dak.conf"; confirm.
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            # NOTE(review): reads default_config rather than confpath — verify.
            apt_pkg.ReadConfigFileISC(Cnf,default_config)

    # We are still in here, so there is no local config file or we do
    # not allow local files. Do the normal stuff.
    if Cnf.get("Config::" + res[0] + "::DakConfig"):
        return Cnf["Config::" + res[0] + "::DakConfig"]

    return default_config
def which_apt_conf_file ():
    # Locate the apt config file: optional per-user config, then per-host
    # config, then the default.
    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # NOTE(review): same issue as which_conf_file() — the absolute second
        # argument makes os.path.join discard 'homedir'; confirm intent.
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            apt_pkg.ReadConfigFileISC(Cnf,default_config)

    if Cnf.get("Config::" + res[0] + "::AptConfig"):
        return Cnf["Config::" + res[0] + "::AptConfig"]

    return default_apt_config
def which_alias_file():
    # Return this host's forward-alias file path if it exists (the return
    # statements are elided from this view).
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
    if os.path.exists(aliasfn):
779 ################################################################################
def TemplateSubst(subst_map, filename):
    """ Perform a substition of template

    Reads the template file and replaces every key of subst_map with the
    str() of its value (the close/return lines are elided from this view).
    """
    templatefile = open_file(filename)
    template = templatefile.read()
    for k, v in subst_map.iteritems():
        template = template.replace(k, str(v))
790 ################################################################################
def fubar(msg, exit_code=1):
    # Print a fatal error to stderr; the sys.exit(exit_code) call is elided
    # from this view.
    sys.stderr.write("E: %s\n" % (msg))
797 sys.stderr.write("W: %s\n" % (msg))
799 ################################################################################
801 # Returns the user name with a laughable attempt at rfc822 conformancy
802 # (read: removing stray periods).
804 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
807 return pwd.getpwuid(os.getuid())[0]
809 ################################################################################
819 return ("%d%s" % (c, t))
821 ################################################################################
def cc_fix_changes (changes):
    # Canonicalise the architecture field in-place into a dict of
    # arch-name -> 1 (the split loop is elided from this view).
    o = changes.get("architecture", "")
    del changes["architecture"]
    changes["architecture"] = {}
    changes["architecture"][j] = 1
def changes_compare (a, b):
    """ Sort by source name, source version, 'have source', and then by filename """
    # Unparseable files fall back to plain filename comparison (the
    # try/except wrappers are elided from this view).
    a_changes = parse_changes(a)
    b_changes = parse_changes(b)

    cc_fix_changes (a_changes)
    cc_fix_changes (b_changes)

    # Sort by source name
    a_source = a_changes.get("source")
    b_source = b_changes.get("source")
    q = cmp (a_source, b_source)

    # Sort by source version
    a_version = a_changes.get("version", "0")
    b_version = b_changes.get("version", "0")
    q = apt_pkg.VersionCompare(a_version, b_version)

    # Sort by 'have source'
    a_has_source = a_changes["architecture"].get("source")
    b_has_source = b_changes["architecture"].get("source")
    if a_has_source and not b_has_source:
    elif b_has_source and not a_has_source:

    # Fall back to sort by filename
871 ################################################################################
def find_next_free (dest, too_many=100):
    # Append ".N" suffixes until a non-existing filename is found; gives
    # up after 'too_many' attempts (init/increment lines elided here).
    while os.path.exists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
    if extra >= too_many:
        raise NoFreeFilenameError
883 ################################################################################
def result_join (original, sep = '\t'):
    # Join a sequence with 'sep', mapping None entries to the empty string
    # (the resultlist initialisation is elided from this view).
    for i in xrange(len(original)):
        if original[i] == None:
            resultlist.append("")
            resultlist.append(original[i])
    return sep.join(resultlist)
894 ################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    # Prefix every line of 'str' with 'prefix', skipping blank lines
    # unless include_blank_lines is set. NOTE: parameter shadows the
    # builtin 'str' (pre-existing; renaming would change the interface).
    for line in str.split('\n'):
        if line or include_blank_lines:
            out += "%s%s\n" % (prefix, line)
    # Strip trailing new line
907 ################################################################################
def validate_changes_file_arg(filename, require_changes=1):
    """
    'filename' is either a .changes or .dak file. If 'filename' is a
    .dak file, it's changed to be the corresponding .changes file. The
    function then checks if the .changes file a) exists and b) is
    readable and returns the .changes filename if so. If there's a
    problem, the next action depends on the option 'require_changes'
    argument:

    - If 'require_changes' == -1, errors are ignored and the .changes
      filename is returned.
    - If 'require_changes' == 0, a warning is given and 'None' is returned.
    - If 'require_changes' == 1, a fatal error is raised.
    """
    orig_filename = filename
    if filename.endswith(".dak"):
        filename = filename[:-4]+".changes"

    if not filename.endswith(".changes"):
        error = "invalid file type; not a changes file"
        if not os.access(filename,os.R_OK):
            if os.path.exists(filename):
                error = "permission denied"
                error = "file not found"

        if require_changes == 1:
            fubar("%s: %s." % (orig_filename, error))
        elif require_changes == 0:
            warn("Skipping %s - %s" % (orig_filename, error))
    else: # We only care about the .dak file
950 ################################################################################
953 return (arch != "source" and arch != "all")
955 ################################################################################
def join_with_commas_and(list):
    """Join a list into an English enumeration, e.g. ['a','b','c'] ->
    "a, b and c"; an empty list yields "nothing"."""
    # NOTE: parameter shadows the builtin 'list' (kept for interface compat).
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    head, last = list[:-1], list[-1]
    return ", ".join(head) + " and " + last
962 ################################################################################
967 (pkg, version, constraint) = atom
969 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
972 pp_deps.append(pp_dep)
973 return " |".join(pp_deps)
975 ################################################################################
980 ################################################################################
def parse_args(Options):
    """ Handle -a, -c and -s arguments; returns them as SQL constraints """
    # XXX: This should go away and everything which calls it be converted
    # to use SQLA properly. For now, we'll just fix it not to use
    # the old Pg interface though
    session = DBConn().session()

    # Process suite (list-init / else lines elided from this view).
    for suitename in split_args(Options["Suite"]):
        suite = get_suite(suitename, session=session)
        if suite.suite_id is None:
            warn("suite '%s' not recognised." % (suite.suite_name))
            suite_ids_list.append(suite.suite_id)
    con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
    fubar("No valid suite given.")

    if Options["Component"]:
        component_ids_list = []
        for componentname in split_args(Options["Component"]):
            component = get_component(componentname, session=session)
            if component is None:
                warn("component '%s' not recognised." % (componentname))
                component_ids_list.append(component.component_id)
        if component_ids_list:
            con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
            fubar("No valid component given.")

    # Process architecture
    con_architectures = ""
    if Options["Architecture"]:
        for archname in split_args(Options["Architecture"]):
            # "source" is handled separately via check_source.
            if archname == "source":
                arch = get_architecture(archname, session=session)
                warn("architecture '%s' not recognised." % (archname))
                arch_ids_list.append(arch.arch_id)
            con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
        if not check_source:
            fubar("No valid architecture given.")

    return (con_suites, con_architectures, con_components, check_source)
1044 ################################################################################
1046 # Inspired(tm) by Bryn Keller's print_exc_plus (See
1047 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
1050 tb = sys.exc_info()[2]
1057 frame = frame.f_back
1059 traceback.print_exc()
1061 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
1062 frame.f_code.co_filename,
1064 for key, value in frame.f_locals.items():
1065 print "\t%20s = " % key,
1069 print "<unable to print>"
1071 ################################################################################
1073 def try_with_debug(function):
1081 ################################################################################
def arch_compare_sw (a, b):
    """
    Function for use in sorting lists of architectures.

    Sorts normally except that 'source' dominates all others.
    """
    if a == "source" and b == "source":
1099 ################################################################################
def split_args (s, dwim=1):
    """
    Split command line arguments which can be separated by either commas
    or whitespace. If dwim is set, it will complain about string ending
    in comma since this usually means someone did 'dak ls -a i386, m68k
    foo' or something and the inevitable confusion resulting from 'm68k'
    being treated as an argument is undesirable.
    """
    # No comma -> whitespace split; otherwise comma split (returns elided).
    if s.find(",") == -1:
        if s[-1:] == "," and dwim:
            fubar("split_args: found trailing comma, spurious space maybe?")
1117 ################################################################################
def gpgv_get_status_output(cmd, status_read, status_write):
    """
    Our very own version of commands.getouputstatus(), hacked to support
    gpgv's status fd.

    NOTE(review): the fork and several fd-management lines are elided from
    this view; nesting below is a best-effort reconstruction.
    """
    cmd = ['/bin/sh', '-c', cmd]
    p2cread, p2cwrite = os.pipe()
    c2pread, c2pwrite = os.pipe()
    errout, errin = os.pipe()

    # Child: close every fd except the status fd, then exec the command.
    for i in range(3, 256):
        if i != status_write:
    os.execvp(cmd[0], cmd)

    # Parent: read command output and status output until both close.
    os.dup2(c2pread, c2pwrite)
    os.dup2(errout, errin)

    output = status = ""
    i, o, e = select.select([c2pwrite, errin, status_read], [], [])
    r = os.read(fd, 8196)
    more_data.append(fd)
    if fd == c2pwrite or fd == errin:
    elif fd == status_read:
        fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))

    pid, exit_status = os.waitpid(pid, 0)

    os.close(status_write)
    os.close(status_read)

    return output, status, exit_status
1184 ################################################################################
def process_gpgv_output(status):
    # Parse gpgv --status-fd output into a {keyword: args} dict plus an
    # internal_error string (empty when parsing succeeded).

    # Process the status-fd output
    for line in status.split('\n'):
        split = line.split()
        internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
        # A few tokens may legitimately repeat; all others are errors.
        if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
        keywords[keyword] = args

    return (keywords, internal_error)
1211 ################################################################################
def retrieve_key (filename, keyserver=None, keyring=None):
    """
    Retrieve the key that signed 'filename' from 'keyserver' and
    add it to 'keyring'. Returns nothing on success, or an error message
    on failure.
    """
    # Defaults for keyserver and keyring
    keyserver = Cnf["Dinstall::KeyServer"]
    keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(filename):
        return "%s: tainted filename" % (filename)

    # Invoke gpgv on the file
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
    (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)
    return internal_error

    # gpgv against an empty keyring must report the missing key.
    if not keywords.has_key("NO_PUBKEY"):
        return "didn't find expected NO_PUBKEY in gpgv status-fd output"

    fingerprint = keywords["NO_PUBKEY"][0]
    # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
    # it'll try to create a lockfile in /dev. A better solution might
    # be a tempfile or something.
    cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
          % (Cnf["Dinstall::SigningKeyring"])
    cmd += " --keyring %s --keyserver %s --recv-key %s" \
           % (keyring, keyserver, fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    return "'%s' failed with exit code %s" % (cmd, result)
1257 ################################################################################
def gpg_keyring_args(keyrings=None):
    # Build the "--keyring X --keyring Y" argument string; defaults to the
    # configured keyrings when none are passed (guard line elided).
    keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    return " ".join(["--keyring %s" % x for x in keyrings])
1265 ################################################################################
def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None):
    """
    Check the signature of a file and return the fingerprint if the
    signature is valid, or 'None' if it's not, together with a list of
    reject messages (empty when the signature checked out).

    @type sig_filename: string
    @param sig_filename: the name of the signed file to check

    @type data_filename: string
    @param data_filename: the name of the file a detached signature
        applies to, if any

    @type keyrings: list of strings (or None)
    @param keyrings: keyrings to check against; defaults to the
        Dinstall::GPGKeyring configuration value

    @type autofetch: None, True or False
    @param autofetch: whether to try fetching an unknown signing key;
        None means use the Dinstall::KeyAutoFetch config default

    @rtype: tuple
    @return: (fingerprint or None, list of reject messages)
    """

    rejects = []

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(sig_filename):
        rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
        return (None, rejects)

    if data_filename and not re_taint_free.match(data_filename):
        rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
        return (None, rejects)

    if not keyrings:
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    # Autofetch the signing key if that's enabled
    if autofetch == None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    if autofetch:
        error_msg = retrieve_key(sig_filename)
        if error_msg:
            rejects.append(error_msg)
            return (None, rejects)

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    if internal_error:
        rejects.append("internal error while performing signature check on %s." % (sig_filename))
        # BUGFIX: these were two-argument list.append() calls (a leftover
        # from the old reject(msg, prefix) callback API), which raises
        # TypeError; list.append takes exactly one argument.
        rejects.append(internal_error)
        rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.")
        return (None, rejects)

    # Now check for obviously bad things in the processed output
    if keywords.has_key("KEYREVOKED"):
        rejects.append("The key used to sign %s has been revoked." % (sig_filename))
    if keywords.has_key("BADSIG"):
        rejects.append("bad signature on %s." % (sig_filename))
    if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
        rejects.append("failed to check signature on %s." % (sig_filename))
    if keywords.has_key("NO_PUBKEY"):
        args = keywords["NO_PUBKEY"]
        key = "UNKNOWN"
        if len(args) >= 1:
            key = args[0]
        rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if keywords.has_key("BADARMOR"):
        rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if keywords.has_key("NODATA"):
        rejects.append("no signature found in %s." % (sig_filename))
    if keywords.has_key("EXPKEYSIG"):
        args = keywords["EXPKEYSIG"]
        key = "UNKNOWN"
        if len(args) >= 1:
            key = args[0]
        rejects.append("Signature made by expired key 0x%s" % (key))
    if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
        args = keywords["KEYEXPIRED"]
        expiredate = ""
        if len(args) >= 1:
            timestamp = args[0]
            # gpgv emits a raw epoch timestamp; an ISO 8601 value
            # (containing 'T') is passed through as-is.
            if timestamp.count("T") == 0:
                try:
                    expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
                except ValueError:
                    expiredate = "unknown (%s)" % (timestamp)
            else:
                expiredate = timestamp
        rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))

    if len(rejects) > 0:
        return (None, rejects)

    # Next check gpgv exited with a zero return code
    if exit_status:
        rejects.append("gpgv failed while checking %s." % (sig_filename))
        if status.strip():
            rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "))
        else:
            rejects.append(prefix_multi_line_string(output, " [GPG output:] "))
        return (None, rejects)

    # Sanity check the good stuff we expect
    if not keywords.has_key("VALIDSIG"):
        rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
    else:
        args = keywords["VALIDSIG"]
        if len(args) < 1:
            rejects.append("internal error while checking signature on %s." % (sig_filename))
        else:
            fingerprint = args[0]
    if not keywords.has_key("GOODSIG"):
        rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if not keywords.has_key("SIG_ID"):
        rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

    # Finally ensure there's not something we don't recognise
    known_keywords = dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
                          NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="",POLICY_URL="")

    for keyword in keywords.keys():
        if not known_keywords.has_key(keyword):
            rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))

    if len(rejects) > 0:
        return (None, rejects)

    return (fingerprint, [])
1399 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """Retrieve email addresses from gpg key uids for a given fingerprint."""
    # Cache hit: reuse a previous lookup (possibly an empty set).
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses != None:
        return addresses
    addresses = set()
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
           % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    # Only parse the uid lines when gpg succeeded; on failure we cache an
    # empty set rather than retrying on every call.
    if result == 0:
        for l in output.split('\n'):
            m = re_gpg_uid.match(l)
            if m:
                addresses.add(m.group(1))
    key_uid_email_cache[fingerprint] = addresses
    return addresses
1418 ################################################################################
1420 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """
    Greedy word-wrap: split 'paragraph' on whitespace and re-join it into
    lines of at most 'max_length' characters.  A word longer than
    'max_length' is emitted on a line of its own.  Every line after the
    first is prefixed with 'prefix'.

    Returns the wrapped text as a single string.
    """
    line = ""
    s = ""
    have_started = 0
    words = paragraph.split()

    for word in words:
        word_size = len(word)
        if word_size > max_length:
            # Over-long word: flush any pending line, then emit the word
            # on a line of its own.
            # BUGFIX: 'line' must be reset after flushing here, otherwise
            # the pending text was emitted a second time on the next flush.
            if line:
                s += line + '\n' + prefix
                line = ""
            s += word + '\n' + prefix
            have_started = 1
        elif not line:
            # First word of a (new) line.
            line = word
            have_started = 1
        else:
            if len(line) + word_size + 1 > max_length:
                # Word doesn't fit; flush the current line and start a new
                # one with this word.
                s += line + '\n' + prefix
                line = word
            else:
                line += ' ' + word

    if have_started:
        s += line

    return s
1451 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
    Returns the fixed-up 'src'.
    """
    # Strip the leading 'root' from both paths (first occurrence only).
    stripped_src = src.replace(root, '', 1)
    stripped_dest = dest.replace(root, '', 1)
    # Climb out of every directory component of the link's location.
    parent = os.path.dirname(stripped_dest)
    depth = len(parent.split('/'))
    return '../' * depth + stripped_src
1464 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Pre-create a secure, unique temporary file and hand it back.

    'directory' (optional) is where the file is created; 'prefix'
    (default "dak") starts the filename and 'suffix' (default empty)
    ends it.

    Returns a (file descriptor, pathname) pair, as from tempfile.mkstemp.
    """
    return tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=directory)
1478 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Pre-create a secure, unique temporary directory and hand it back.

    'parent' (optional) is where the directory is created; 'prefix'
    (default "dak") starts its name and 'suffix' (default empty) ends it.

    Returns the pathname of the new directory, as from tempfile.mkdtemp.
    """
    return tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=parent)
1492 ################################################################################
def is_email_alias(email):
    """ Checks if the user part of the email is listed in the alias file. """
    global alias_cache
    # Lazily build the cache of alias names on first use.
    if alias_cache == None:
        aliasfn = which_alias_file()
        alias_cache = set()
        if aliasfn:
            # BUGFIX: close the alias file when done instead of leaking
            # the handle from a bare open() in the for-loop.
            f = open(aliasfn)
            try:
                # /etc/aliases format: "name: target" — we only need the name.
                for l in f:
                    alias_cache.add(l.split(':')[0])
            finally:
                f.close()
    uid = email.split('@')[0]
    return uid in alias_cache
1506 ################################################################################
def get_changes_files(from_dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place).

    Returns a list of filenames.
    """
    try:
        # Much of the rest of p-u/p-a depends on being in the right place
        os.chdir(from_dir)
        changes_files = [x for x in os.listdir(from_dir) if x.endswith('.changes')]
    except OSError as e:
        fubar("Failed to read list from directory %s (%s)" % (from_dir, e))

    return changes_files
1525 ################################################################################
# Module-level initialisation: build the global apt_pkg configuration
# object ('Cnf') used throughout this module.
Cnf = apt_pkg.newConfiguration()
# DAK_TEST lets the test suite skip reading the host's real configuration.
if not os.getenv("DAK_TEST"):
    apt_pkg.ReadConfigFileISC(Cnf,default_config)

# If a host-specific config file is in use, layer it on top of the defaults.
# NOTE(review): which_conf_file() is defined elsewhere in this file —
# presumably it honours a per-host override; confirm against the full file.
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())