2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
40 import email as modemail
43 from dbconn import DBConn, get_architecture, get_component, get_suite
44 from dak_exceptions import *
45 from textutils import fix_maintainer
46 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
47 re_multi_line_field, re_srchasver, re_taint_free, \
48 re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource, \
51 from formats import parse_format, validate_changes_format
52 from srcformats import get_format_from_string
53 from collections import defaultdict
55 ################################################################################
# Module-level configuration defaults and caches used throughout this file.
57 default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
58 default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used
60 alias_cache = None #: Cache for email alias checks
61 key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
63 # (hashname, function, earliest_changes_version)
# Each entry: (hash name, hashing callable, first .changes format version to carry it).
64 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
65 ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
67 # Monkeypatch commands.getstatusoutput as it may not return the correct exit
68 # code in lenny's Python. This also affects commands.getoutput and
# Replacement for commands.getstatusoutput() built on subprocess.
# NOTE(review): several lines of this function are elided in this view
# (e.g. the wait()/return-code handling).
70 def dak_getstatusoutput(cmd):
# Run cmd through the shell, merging stderr into stdout so the caller
# gets a single combined output string.
71 pipe = subprocess.Popen(cmd, shell=True, universal_newlines=True,
72 stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
74 output = "".join(pipe.stdout.readlines())
# Strip a single trailing newline, mirroring commands.getstatusoutput().
76 if output[-1:] == '\n':
# Install the replacement over the stdlib implementation.
84 commands.getstatusoutput = dak_getstatusoutput
86 ################################################################################
89 """ Escape html chars """
90 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
92 ################################################################################
# Thin wrapper around open() that converts IOError into CantOpenError.
# NOTE(review): the docstring delimiters and the try/except/return lines are
# elided in this view.
94 def open_file(filename, mode='r'):
96 Open C{file}, return fileobject.
98 @type filename: string
99 @param filename: path/filename to open
102 @param mode: open mode
105 @return: open fileobject
107 @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
111 f = open(filename, mode)
113 raise CantOpenError, filename
116 ################################################################################
# raw_input() wrapper that prints the prompt itself and treats EOF (^D)
# as a user interrupt.
# NOTE(review): the actual read and the exception-handling lines are elided.
118 def our_raw_input(prompt=""):
122 sys.stdout.write(prompt)
131 sys.stderr.write("\nUser interrupt (^D).\n")
134 ################################################################################
# Split a Debian "component/section" value into its parts and return a
# (section, component) tuple; the default component may be expanded from the
# Cnf configuration.  NOTE(review): some branches are elided in this view.
136 def extract_component_from_section(section):
139 if section.find('/') != -1:
140 component = section.split('/')[0]
142 # Expand default component
144 if Cnf.has_key("Component::%s" % section):
149 return (section, component)
151 ################################################################################
# Parse deb822-style (RFC822-like) control data into a field->value dict.
# signing_rules: -1 = no signature required, 0 = signature required,
# 1 = strict .dsc-style checking of the PGP framing.
# NOTE(review): many lines of this function are elided in this view; the
# comments below describe only what the visible lines establish.
153 def parse_deb822(contents, signing_rules=0):
157 # Split the lines in the input, keeping the linebreaks.
158 lines = contents.splitlines(True)
161 raise ParseChangesError, "[Empty changes file]"
163 # Reindex by line number so we can easily verify the format of
169 indexed_lines[index] = line[:-1]
173 num_of_lines = len(indexed_lines.keys())
# Walk the indexed lines, tracking whether we are inside the signed block.
176 while index < num_of_lines:
178 line = indexed_lines[index]
180 if signing_rules == 1:
# Strict mode: the data section must be followed by a PGP SIGNATURE block.
182 if index > num_of_lines:
183 raise InvalidDscError, index
184 line = indexed_lines[index]
185 if not line.startswith("-----BEGIN PGP SIGNATURE"):
186 raise InvalidDscError, index
191 if line.startswith("-----BEGIN PGP SIGNATURE"):
193 if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
195 if signing_rules == 1:
# Strict mode: skip the PGP header lines up to the blank separator.
196 while index < num_of_lines and line != "":
198 line = indexed_lines[index]
200 # If we're not inside the signed data, don't process anything
201 if signing_rules >= 0 and not inside_signature:
# "Field: value" on a single line.
203 slf = re_single_line_field.match(line)
205 field = slf.groups()[0].lower()
206 changes[field] = slf.groups()[1]
210 changes[field] += '\n'
# Continuation line of a multi-line field.
212 mlf = re_multi_line_field.match(line)
215 raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
216 if first == 1 and changes[field] != "":
217 changes[field] += '\n'
219 changes[field] += mlf.groups()[0] + '\n'
# Strict mode: we must have seen the closing signature block.
223 if signing_rules == 1 and inside_signature:
224 raise InvalidDscError, index
226 changes["filecontents"] = "".join(lines)
228 if changes.has_key("source"):
229 # Strip the source version in brackets from the source field,
230 # put it in the "source-version" field instead.
231 srcver = re_srchasver.search(changes["source"])
233 changes["source"] = srcver.group(1)
234 changes["source-version"] = srcver.group(2)
237 raise ParseChangesError, error
241 ################################################################################
# Read a .changes file, verify it is valid UTF-8 and delegate the actual
# field parsing to parse_deb822().
243 def parse_changes(filename, signing_rules=0):
245 Parses a changes file and returns a dictionary where each field is a
246 key. The mandatory first argument is the filename of the .changes
249 signing_rules is an optional argument:
251 - If signing_rules == -1, no signature is required.
252 - If signing_rules == 0 (the default), a signature is required.
253 - If signing_rules == 1, it turns on the same strict format checking
256 The rules for (signing_rules == 1)-mode are:
258 - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
259 followed by any PGP header data and must end with a blank line.
261 - The data section must end with a blank line and must be followed by
262 "-----BEGIN PGP SIGNATURE-----".
265 changes_in = open_file(filename)
266 content = changes_in.read()
# unicode() is used purely for validation: it raises if content is not UTF-8.
269 unicode(content, 'utf-8')
271 raise ChangesUnicodeError, "Changes file not proper utf-8"
272 return parse_deb822(content, signing_rules)
274 ################################################################################
def hash_key(hashname):
    """Map a hash name (e.g. "sha1") to the key used for it in a files dict
    (e.g. "sha1sum")."""
    keyname = '%ssum' % hashname
    return keyname
279 ################################################################################
# Compute the given hash for every file in `files` and store it under
# hash_key(hashname) in each entry; returns a list of reject messages.
# NOTE(review): several lines (rejmsg init, continue, return) are elided here.
281 def create_hash(where, files, hashname, hashfunc):
283 create_hash extends the passed files dict with the given hash by
284 iterating over all files on disk and passing them to the hashing
289 for f in files.keys():
291 file_handle = open_file(f)
292 except CantOpenError:
# Unopenable files are reported rather than raising.
293 rejmsg.append("Could not open file %s for checksumming" % (f))
296 files[f][hash_key(hashname)] = hashfunc(file_handle)
# Verify the stored hash of every file in `files` against the file on disk.
# Does not modify its input; returns a list of reject messages.
# NOTE(review): several lines (rejmsg init, try, finally/close, return) are
# elided in this view.
303 def check_hash(where, files, hashname, hashfunc):
305 check_hash checks the given hash in the files dict against the actual
306 files on disk. The hash values need to be present consistently in
307 all file entries. It does not modify its input in any way.
311 for f in files.keys():
315 file_handle = open_file(f)
317 # Check for the hash entry, to not trigger a KeyError.
318 if not files[f].has_key(hash_key(hashname)):
319 rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
323 # Actually check the hash for correctness.
324 if hashfunc(file_handle) != files[f][hash_key(hashname)]:
325 rejmsg.append("%s: %s check failed in %s" % (f, hashname,
327 except CantOpenError:
328 # TODO: This happens when the file is in the pool.
329 # warn("Cannot open file %s" % f)
# Compare each file's "size" entry in `files` against its on-disk size
# (os.stat); returns a list of reject messages.
# NOTE(review): the stat call and its exception handling are elided here.
338 def check_size(where, files):
340 check_size checks the file sizes in the passed files dict against the
345 for f in files.keys():
350 # TODO: This happens when the file is in the pool.
354 actual_size = entry[stat.ST_SIZE]
355 size = int(files[f]["size"])
356 if size != actual_size:
357 rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
358 % (f, actual_size, size, where))
# Validate that the file list of a .dsc matches what its Format field allows
# (orig tarball, diff, native tarball, etc.).  Returns a list of reject
# messages.  NOTE(review): several lines are elided in this view.
363 def check_dsc_files(dsc_filename, dsc=None, dsc_files=None):
365 Verify that the files listed in the Files field of the .dsc are
366 those expected given the announced Format.
368 @type dsc_filename: string
369 @param dsc_filename: path of .dsc file
372 @param dsc: the content of the .dsc parsed by C{parse_changes()}
374 @type dsc_files: dict
375 @param dsc_files: the file list returned by C{build_file_list()}
378 @return: all errors detected
382 # Parse the file if needed
384 dsc = parse_changes(dsc_filename, signing_rules=1);
386 if dsc_files is None:
387 dsc_files = build_file_list(dsc, is_a_dsc=1)
389 # Ensure .dsc lists proper set of source files according to the format
# 'has' counts how many files of each semantic type were seen.
391 has = defaultdict(lambda: 0)
# Lookup table: filename-suffix regex -> type counters to bump.
394 (r'orig.tar.gz', ('orig_tar_gz', 'orig_tar')),
395 (r'diff.gz', ('debian_diff',)),
396 (r'tar.gz', ('native_tar_gz', 'native_tar')),
397 (r'debian\.tar\.(gz|bz2)', ('debian_tar',)),
398 (r'orig\.tar\.(gz|bz2)', ('orig_tar',)),
399 (r'tar\.(gz|bz2)', ('native_tar',)),
400 (r'orig-.+\.tar\.(gz|bz2)', ('more_orig_tar',)),
403 for f in dsc_files.keys():
404 m = re_issource.match(f)
406 rejmsg.append("%s: %s in Files field not recognised as source."
410 # Populate 'has' dictionary by resolving keys in lookup table
412 for regex, keys in ftype_lookup:
413 if re.match(regex, m.group(3)):
419 # File does not match anything in lookup table; reject
# NOTE(review): this calls reject() while the rest of the function appends
# to rejmsg — looks inconsistent; confirm reject() is defined/intended here.
421 reject("%s: unexpected source file '%s'" % (dsc_filename, f))
423 # Check for multiple files
424 for file_type in ('orig_tar', 'native_tar', 'debian_tar', 'debian_diff'):
425 if has[file_type] > 1:
426 rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))
428 # Source format specific tests
430 format = get_format_from_string(dsc['format'])
432 '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)
435 except UnknownFormatError:
436 # Not an error here for now
# Reject any "Checksums-*" field whose hash name is not in known_hashes.
# NOTE(review): the rejmsg initialisation and return are elided in this view.
443 def check_hash_fields(what, manifest):
445 check_hash_fields ensures that there are no checksum fields in the
446 given dict that we do not know about.
# Extract just the hash names from the (name, func, version) tuples.
450 hashes = map(lambda x: x[0], known_hashes)
451 for field in manifest:
452 if field.startswith("checksums-"):
453 hashname = field.split("-",1)[1]
454 if hashname not in hashes:
455 rejmsg.append("Unsupported checksum field for %s "\
456 "in %s" % (hashname, what))
459 ################################################################################
# Ensure the .changes carries (or we compute) the given hash for all files.
# NOTE(review): the branch bodies and the choice of `func` are partly elided.
461 def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
462 if format >= version:
463 # The version should contain the specified hash.
466 # Import hashes from the changes
467 rejmsg = parse_checksums(".changes", files, changes, hashname)
471 # We need to calculate the hash because it can't possibly
474 return func(".changes", files, hashname, hashfunc)
476 # We could add the orig which might be in the pool to the files dict to
477 # access the checksums easily.
# Verify every hash that is *present* in the .dsc; import it into dsc_files
# and then re-check it against the files on disk via check_hash().
479 def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
481 ensure_dsc_hashes' task is to ensure that each and every *present* hash
482 in the dsc is correct, i.e. identical to the changes file and if necessary
483 the pool. The latter task is delegated to check_hash.
# If the .dsc does not carry this checksum field at all, there is nothing
# to verify for it (the early-return line is elided in this view).
487 if not dsc.has_key('Checksums-%s' % (hashname,)):
489 # Import hashes from the dsc
490 parse_checksums(".dsc", dsc_files, dsc, hashname)
492 rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
# Parse a "Checksums-<hash>" field of `manifest` (checksum / size / filename
# per line), storing each checksum into the corresponding files[] entry and
# collecting reject messages for mismatches and missing entries.
# NOTE(review): several lines (rejmsg init, continue statements, return) are
# elided in this view.
497 def parse_checksums(where, files, manifest, hashname):
499 field = 'checksums-%s' % hashname
500 if not field in manifest:
502 for line in manifest[field].split('\n'):
505 clist = line.strip().split(' ')
507 checksum, size, checkfile = clist
509 rejmsg.append("Cannot parse checksum line [%s]" % (line))
511 if not files.has_key(checkfile):
512 # TODO: check for the file's entry in the original files dict, not
513 # the one modified by (auto)byhand and other weird stuff
514 # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
515 # (file, hashname, where))
# NOTE(review): `size` is still a string here while files[...]["size"] may
# be an int elsewhere — confirm the comparison semantics are intended.
517 if not files[checkfile]["size"] == size:
518 rejmsg.append("%s: size differs for files and checksums-%s entry "\
519 "in %s" % (checkfile, hashname, where))
521 files[checkfile][hash_key(hashname)] = checksum
# Finally, make sure every file got an entry for this hash.
522 for f in files.keys():
523 if not files[f].has_key(hash_key(hashname)):
# NOTE(review): this message interpolates `checkfile` (last value from the
# loop above) instead of `f` — looks like a bug; the offending file would be
# misreported.
524 rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
528 ################################################################################
530 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
# Turn the "Files"-style field of a parsed .changes/.dsc into a dict keyed by
# filename, with size/section/priority/component and the given hash.
# NOTE(review): several lines (files init, empty-line skip, length checks,
# return) are elided in this view.
532 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
535 # Make sure we have a Files: field to parse...
536 if not changes.has_key(field):
537 raise NoFilesFieldError
539 # Validate .changes Format: field
541 validate_changes_format(parse_format(changes['format']), field)
# Only the .changes "files" field carries section/priority columns.
543 includes_section = (not is_a_dsc) and field == "files"
545 # Parse each entry/line:
546 for i in changes[field].split('\n'):
550 section = priority = ""
553 (md5, size, section, priority, name) = s
555 (md5, size, name) = s
557 raise ParseChangesError, i
564 (section, component) = extract_component_from_section(section)
566 files[name] = dict(size=size, section=section,
567 priority=priority, component=component)
568 files[name][hashname] = md5
572 ################################################################################
# Send a mail via the configured sendmail command; the message is either a
# string or an existing file.  Optionally filters recipients against a
# whitelist (Dinstall::MailWhiteList).
# NOTE(review): many lines of this function are elided in this view.
574 def send_mail (message, filename=""):
575 """sendmail wrapper, takes _either_ a message string or a file as arguments"""
577 # Check whether we're supposed to be sending mail
578 if Cnf.has_key("Dinstall::Options::No-Mail") and Cnf["Dinstall::Options::No-Mail"]:
581 # If we've been passed a string dump it into a temporary file
583 (fd, filename) = tempfile.mkstemp()
584 os.write (fd, message)
# Whitelist handling: only keep recipients matching a whitelist pattern.
587 if Cnf.has_key("Dinstall::MailWhiteList") and \
588 Cnf["Dinstall::MailWhiteList"] != "":
589 message_in = open_file(filename)
590 message_raw = modemail.message_from_file(message_in)
594 whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
596 for line in whitelist_in:
597 if not re_whitespace_comment.match(line):
# Lines carrying the RE marker are compiled as regexes; others literally.
598 if re_re_mark.match(line):
599 whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
601 whitelist.append(re.compile(re.escape(line.strip())))
# Filter each recipient header field against the whitelist.
606 fields = ["To", "Bcc", "Cc"]
609 value = message_raw.get(field, None)
612 for item in value.split(","):
613 (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
619 if not mail_whitelisted:
620 print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])
624 # Doesn't have any mail in whitelist so remove the header
626 del message_raw[field]
628 message_raw.replace_header(field, ', '.join(match))
630 # Change message fields in order if we don't have a To header
631 if not message_raw.has_key("To"):
634 if message_raw.has_key(field):
635 message_raw[fields[-1]] = message_raw[field]
636 del message_raw[field]
639 # Clean up any temporary files
640 # and return, as we removed all recipients.
642 os.unlink (filename);
# NOTE(review): O_RDWR|O_EXCL without O_CREAT (and mode 0700 on a file) is
# unusual — confirm the intended open semantics when rewriting the message.
645 fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
646 os.write (fd, message_raw.as_string(True));
# Invoke sendmail with the (possibly rewritten) message file on stdin.
650 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
652 raise SendmailFailedError, output
654 # Clean up any temporary files
658 ################################################################################
# Return the pool subdirectory for a source package: "lib*" packages use a
# four-character prefix directory, everything else the first letter.
# NOTE(review): the lines normalising `component` and the else branch are
# elided in this view.
660 def poolify (source, component):
663 if source[:3] == "lib":
664 return component + source[:4] + '/' + source + '/'
666 return component + source[:1] + '/' + source + '/'
668 ################################################################################
# Move src to dest (creating parent directories as needed), refusing to
# overwrite an existing file unless `overwrite` is set; the destination gets
# `perms`.  Implemented as copy2 + chmod (the unlink of src is elided here).
# NOTE(review): several lines of this function are elided in this view.
670 def move (src, dest, overwrite = 0, perms = 0664):
671 if os.path.exists(dest) and os.path.isdir(dest):
674 dest_dir = os.path.dirname(dest)
675 if not os.path.exists(dest_dir):
# Create missing parents group-writable/setgid (umask temporarily cleared).
676 umask = os.umask(00000)
677 os.makedirs(dest_dir, 02775)
679 #print "Moving %s to %s..." % (src, dest)
680 if os.path.exists(dest) and os.path.isdir(dest):
681 dest += '/' + os.path.basename(src)
682 # Don't overwrite unless forced to
683 if os.path.exists(dest):
685 fubar("Can't move %s to %s - file already exists." % (src, dest))
687 if not os.access(dest, os.W_OK):
688 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
689 shutil.copy2(src, dest)
690 os.chmod(dest, perms)
# Copy src to dest; same structure as move() above but raises exceptions
# (FileExistsError / CantOverwriteError) instead of calling fubar().
# NOTE(review): several lines of this function are elided in this view.
693 def copy (src, dest, overwrite = 0, perms = 0664):
694 if os.path.exists(dest) and os.path.isdir(dest):
697 dest_dir = os.path.dirname(dest)
698 if not os.path.exists(dest_dir):
699 umask = os.umask(00000)
700 os.makedirs(dest_dir, 02775)
702 #print "Copying %s to %s..." % (src, dest)
703 if os.path.exists(dest) and os.path.isdir(dest):
704 dest += '/' + os.path.basename(src)
705 # Don't overwrite unless forced to
706 if os.path.exists(dest):
708 raise FileExistsError
710 if not os.access(dest, os.W_OK):
711 raise CantOverwriteError
712 shutil.copy2(src, dest)
713 os.chmod(dest, perms)
715 ################################################################################
# Fragment of a host-identification helper; its "def" line is elided in this
# view.  Looks up a per-host DatabaseHostname override in the config.
718 res = socket.gethostbyaddr(socket.gethostname())
719 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
720 if database_hostname:
721 return database_hostname
# Determine which dak config file to use: $DAK_CONFIG wins, then an optional
# per-user file (if allowed for this host), then a per-host override, and
# finally the built-in default.
725 def which_conf_file ():
726 if os.getenv('DAK_CONFIG'):
727 return os.getenv('DAK_CONFIG')
729 res = socket.gethostbyaddr(socket.gethostname())
730 # In case we allow local config files per user, try if one exists
731 if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
732 homedir = os.getenv("HOME")
# NOTE(review): os.path.join() with an absolute second component discards
# homedir entirely — this always yields "/etc/dak.conf". Looks like a bug;
# presumably "etc/dak.conf" (relative) was intended.
733 confpath = os.path.join(homedir, "/etc/dak.conf")
734 if os.path.exists(confpath):
735 apt_pkg.ReadConfigFileISC(Cnf,default_config)
737 # We are still in here, so there is no local config file or we do
738 # not allow local files. Do the normal stuff.
739 if Cnf.get("Config::" + res[0] + "::DakConfig"):
740 return Cnf["Config::" + res[0] + "::DakConfig"]
742 return default_config
# Like which_conf_file() but for the apt configuration; falls back to
# default_apt_config.
744 def which_apt_conf_file ():
745 res = socket.gethostbyaddr(socket.gethostname())
746 # In case we allow local config files per user, try if one exists
747 if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
748 homedir = os.getenv("HOME")
# NOTE(review): same os.path.join() absolute-path issue as in
# which_conf_file() — homedir is discarded and "/etc/dak.conf" is used.
749 confpath = os.path.join(homedir, "/etc/dak.conf")
750 if os.path.exists(confpath):
751 apt_pkg.ReadConfigFileISC(Cnf,default_config)
753 if Cnf.get("Config::" + res[0] + "::AptConfig"):
754 return Cnf["Config::" + res[0] + "::AptConfig"]
756 return default_apt_config
# Return the per-host forward-alias file path if it exists.
# NOTE(review): the return statements (path / fallback) are elided here.
758 def which_alias_file():
759 hostname = socket.gethostbyaddr(socket.gethostname())[0]
760 aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
761 if os.path.exists(aliasfn):
766 ################################################################################
# Read a template file and substitute every key of subst_map with its value
# (str()-converted).  NOTE(review): the close/return lines are elided here.
768 def TemplateSubst(subst_map, filename):
769 """ Perform a substition of template """
770 templatefile = open_file(filename)
771 template = templatefile.read()
772 for k, v in subst_map.iteritems():
773 template = template.replace(k, str(v))
777 ################################################################################
# Error/identity helpers.  Several "def" lines and bodies in this span are
# elided (warn(), the username helpers, and a size-formatting function).
# fubar: print a fatal error and (on the elided next line) exit.
779 def fubar(msg, exit_code=1):
780 sys.stderr.write("E: %s\n" % (msg))
# Body line of warn() — its def line is elided in this view.
784 sys.stderr.write("W: %s\n" % (msg))
788 # Returns the user name with a laughable attempt at rfc822 conformancy
789 # (read: removing stray periods).
# GECOS full name with periods stripped (its def line is elided).
791 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
# Login name of the current user (its def line is elided).
794 return pwd.getpwuid(os.getuid())[0]
# Fragment of a size-formatting helper: count plus unit suffix.
806 return ("%d%s" % (c, t))
808 ################################################################################
# Normalise the "architecture" field of a parsed changes dict from a
# space-separated string into a {arch: 1} membership dict, in place.
# NOTE(review): the split/loop lines are partially elided in this view.
810 def cc_fix_changes (changes):
811 o = changes.get("architecture", "")
813 del changes["architecture"]
814 changes["architecture"] = {}
816 changes["architecture"][j] = 1
# cmp()-style comparator for .changes filenames: parses both files and
# orders by source name, then version, then presence of source, then name.
# NOTE(review): the exception handling around parse_changes and the return
# statements are elided in this view.
818 def changes_compare (a, b):
819 """ Sort by source name, source version, 'have source', and then by filename """
821 a_changes = parse_changes(a)
826 b_changes = parse_changes(b)
830 cc_fix_changes (a_changes)
831 cc_fix_changes (b_changes)
833 # Sort by source name
834 a_source = a_changes.get("source")
835 b_source = b_changes.get("source")
836 q = cmp (a_source, b_source)
840 # Sort by source version
841 a_version = a_changes.get("version", "0")
842 b_version = b_changes.get("version", "0")
843 q = apt_pkg.VersionCompare(a_version, b_version)
847 # Sort by 'have source'
848 a_has_source = a_changes["architecture"].get("source")
849 b_has_source = b_changes["architecture"].get("source")
850 if a_has_source and not b_has_source:
852 elif b_has_source and not a_has_source:
855 # Fall back to sort by filename
858 ################################################################################
# Find a non-existing filename by appending ".1", ".2", ... to dest, giving
# up (NoFreeFilenameError) after too_many attempts.
# NOTE(review): the initialisation of `extra`/`orig_dest` and the final
# return are elided in this view.
860 def find_next_free (dest, too_many=100):
863 while os.path.exists(dest) and extra < too_many:
864 dest = orig_dest + '.' + repr(extra)
866 if extra >= too_many:
867 raise NoFreeFilenameError
870 ################################################################################
def result_join (original, sep = '\t'):
    """Join the elements of *original* with *sep*, rendering None as "".

    @type original: sequence of strings (or None entries)
    @param original: the values to join
    @type sep: string
    @param sep: separator, defaults to a tab

    @rtype: string
    @return: the joined string with None entries replaced by empty strings
    """
    resultlist = []
    for item in original:
        # Use an identity check: "== None" can trigger custom __eq__ methods
        # and is non-idiomatic; "is None" is both safer and faster.
        if item is None:
            resultlist.append("")
        else:
            resultlist.append(item)
    return sep.join(resultlist)
881 ################################################################################
# Prefix every line of `str` with `prefix`; blank lines are skipped unless
# include_blank_lines is set.  NOTE(review): the accumulator initialisation,
# per-line strip, trailing-newline removal and return are elided here.
# NOTE(review): the parameter shadows the builtin "str".
883 def prefix_multi_line_string(str, prefix, include_blank_lines=0):
885 for line in str.split('\n'):
887 if line or include_blank_lines:
888 out += "%s%s\n" % (prefix, line)
889 # Strip trailing new line
894 ################################################################################
# Normalise a .changes/.dak command-line argument to a readable .changes
# path; on problems the behaviour is selected by require_changes (see the
# docstring).  NOTE(review): several lines, including the success return,
# are elided in this view.
896 def validate_changes_file_arg(filename, require_changes=1):
898 'filename' is either a .changes or .dak file. If 'filename' is a
899 .dak file, it's changed to be the corresponding .changes file. The
900 function then checks if the .changes file a) exists and b) is
901 readable and returns the .changes filename if so. If there's a
902 problem, the next action depends on the option 'require_changes'
905 - If 'require_changes' == -1, errors are ignored and the .changes
906 filename is returned.
907 - If 'require_changes' == 0, a warning is given and 'None' is returned.
908 - If 'require_changes' == 1, a fatal error is raised.
913 orig_filename = filename
914 if filename.endswith(".dak"):
915 filename = filename[:-4]+".changes"
917 if not filename.endswith(".changes"):
918 error = "invalid file type; not a changes file"
920 if not os.access(filename,os.R_OK):
921 if os.path.exists(filename):
922 error = "permission denied"
924 error = "file not found"
927 if require_changes == 1:
928 fubar("%s: %s." % (orig_filename, error))
929 elif require_changes == 0:
930 warn("Skipping %s - %s" % (orig_filename, error))
932 else: # We only care about the .dak file
937 ################################################################################
# Fragment: the enclosing "def" line is elided in this view.  Tests whether
# arch names a real (binary) architecture, i.e. neither "source" nor "all".
940 return (arch != "source" and arch != "all")
942 ################################################################################
def join_with_commas_and(list):
    """Render a sequence as English prose: "nothing", "a", "a and b",
    "a, b and c"."""
    # NB: the parameter shadows the builtin "list"; kept for interface
    # compatibility with existing callers.
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    head = ", ".join(list[:-1])
    return head + " and " + list[-1]
949 ################################################################################
# Fragment of a dependency pretty-printer; its "def" line, accumulator
# initialisation and the no-constraint branch are elided in this view.
# Renders each (pkg, version, constraint) atom and joins alternatives
# with " |".
954 (pkg, version, constraint) = atom
956 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
959 pp_deps.append(pp_dep)
960 return " |".join(pp_deps)
962 ################################################################################
967 ################################################################################
969 def parse_args(Options):
970 """ Handle -a, -c and -s arguments; returns them as SQL constraints """
971 # XXX: This should go away and everything which calls it be converted
972 # to use SQLA properly. For now, we'll just fix it not to use
973 # the old Pg interface though
974 session = DBConn().session()
# NOTE(review): several lines (the Options["Suite"] guard, list inits,
# continue statements) are elided in this view.
978 for suitename in split_args(Options["Suite"]):
979 suite = get_suite(suitename, session=session)
# NOTE(review): if get_suite() can return None for unknown suites, this
# attribute access would raise AttributeError before the warn() — confirm.
980 if suite.suite_id is None:
981 warn("suite '%s' not recognised." % (suite.suite_name))
983 suite_ids_list.append(suite.suite_id)
985 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
987 fubar("No valid suite given.")
# Process component.
992 if Options["Component"]:
993 component_ids_list = []
994 for componentname in split_args(Options["Component"]):
995 component = get_component(componentname, session=session)
996 if component is None:
997 warn("component '%s' not recognised." % (componentname))
999 component_ids_list.append(component.component_id)
1000 if component_ids_list:
1001 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
1003 fubar("No valid component given.")
1007 # Process architecture
1008 con_architectures = ""
1010 if Options["Architecture"]:
1012 for archname in split_args(Options["Architecture"]):
# "source" is tracked separately via check_source rather than an arch id.
1013 if archname == "source":
1016 arch = get_architecture(archname, session=session)
1018 warn("architecture '%s' not recognised." % (archname))
1020 arch_ids_list.append(arch.arch_id)
1022 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
1024 if not check_source:
1025 fubar("No valid architecture given.")
1029 return (con_suites, con_architectures, con_components, check_source)
1031 ################################################################################
1033 # Inspired(tm) by Bryn Keller's print_exc_plus (See
1034 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
# Fragment of an extended-traceback printer; its "def" line and the
# frame-collection loop setup are elided in this view.  Prints the standard
# traceback plus, for each frame, its local variables.
1037 tb = sys.exc_info()[2]
1044 frame = frame.f_back
1046 traceback.print_exc()
1048 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
1049 frame.f_code.co_filename,
1051 for key, value in frame.f_locals.items():
1052 print "\t%20s = " % key,
# Some values raise when stringified; fall back to a placeholder.
1056 print "<unable to print>"
1058 ################################################################################
# Wrapper that runs `function` and, on failure, dumps extended debugging
# output.  NOTE(review): the entire body is elided in this view.
1060 def try_with_debug(function):
1068 ################################################################################
# cmp()-style comparator for architecture names where "source" sorts before
# everything else.  NOTE(review): the non-equal branches are elided here.
1070 def arch_compare_sw (a, b):
1072 Function for use in sorting lists of architectures.
1074 Sorts normally except that 'source' dominates all others.
1077 if a == "source" and b == "source":
1086 ################################################################################
# Split a comma- or whitespace-separated argument string into a list.
# NOTE(review): the docstring delimiters and the return statements are
# elided in this view.
1088 def split_args (s, dwim=1):
1090 Split command line arguments which can be separated by either commas
1091 or whitespace. If dwim is set, it will complain about string ending
1092 in comma since this usually means someone did 'dak ls -a i386, m68k
1093 foo' or something and the inevitable confusion resulting from 'm68k'
1094 being treated as an argument is undesirable.
1097 if s.find(",") == -1:
# With dwim, a trailing comma is treated as a probable typo and is fatal.
1100 if s[-1:] == "," and dwim:
1101 fubar("split_args: found trailing comma, spurious space maybe?")
1104 ################################################################################
# fork/exec `cmd` via /bin/sh while keeping an extra status fd open for
# gpgv's --status-fd output; returns (stdout+stderr output, status-fd data,
# raw waitpid exit status).
# NOTE(review): large parts of this function (the fork, the child's fd
# shuffling, the read loop termination) are elided in this view.
1106 def gpgv_get_status_output(cmd, status_read, status_write):
1108 Our very own version of commands.getouputstatus(), hacked to support
1112 cmd = ['/bin/sh', '-c', cmd]
1113 p2cread, p2cwrite = os.pipe()
1114 c2pread, c2pwrite = os.pipe()
1115 errout, errin = os.pipe()
# Child: close every inherited fd except the status pipe before exec.
1125 for i in range(3, 256):
1126 if i != status_write:
1132 os.execvp(cmd[0], cmd)
# Parent: redirect the child's output ends for reading below.
1138 os.dup2(c2pread, c2pwrite)
1139 os.dup2(errout, errin)
1141 output = status = ""
# Multiplex reads from stdout/stderr and the status fd until all close.
1143 i, o, e = select.select([c2pwrite, errin, status_read], [], [])
1146 r = os.read(fd, 8196)
1148 more_data.append(fd)
1149 if fd == c2pwrite or fd == errin:
1151 elif fd == status_read:
1154 fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
1156 pid, exit_status = os.waitpid(pid, 0)
1158 os.close(status_write)
1159 os.close(status_read)
1169 return output, status, exit_status
1171 ################################################################################
# Parse gpgv --status-fd output into a {keyword: args} dict, accumulating a
# description of any malformed/duplicate lines in internal_error.
# NOTE(review): the dict/error initialisation, blank-line skip and the args
# slicing are elided in this view.
1173 def process_gpgv_output(status):
1174 # Process the status-fd output
1177 for line in status.split('\n'):
1181 split = line.split()
1183 internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
1185 (gnupg, keyword) = split[:2]
# Every status line must carry the [GNUPG:] prefix.
1186 if gnupg != "[GNUPG:]":
1187 internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
# A few keywords may legitimately repeat; everything else must be unique.
1190 if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
1191 internal_error += "found duplicate status token ('%s').\n" % (keyword)
1194 keywords[keyword] = args
1196 return (keywords, internal_error)
1198 ################################################################################
# Fetch the public key that signed `filename` from a keyserver into a
# keyring.  Returns an error message string on failure (the success return
# is elided in this view).
1200 def retrieve_key (filename, keyserver=None, keyring=None):
1202 Retrieve the key that signed 'filename' from 'keyserver' and
1203 add it to 'keyring'. Returns nothing on success, or an error message
1207 # Defaults for keyserver and keyring
1209 keyserver = Cnf["Dinstall::KeyServer"]
1211 keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
1213 # Ensure the filename contains no shell meta-characters or other badness
1214 if not re_taint_free.match(filename):
1215 return "%s: tainted filename" % (filename)
1217 # Invoke gpgv on the file
1218 status_read, status_write = os.pipe()
# /dev/null keyring guarantees NO_PUBKEY so we can extract the fingerprint.
1219 cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1220 (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1222 # Process the status-fd output
1223 (keywords, internal_error) = process_gpgv_output(status)
1225 return internal_error
1227 if not keywords.has_key("NO_PUBKEY"):
1228 return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1230 fingerprint = keywords["NO_PUBKEY"][0]
1231 # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
1232 # it'll try to create a lockfile in /dev. A better solution might
1233 # be a tempfile or something.
1234 cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1235 % (Cnf["Dinstall::SigningKeyring"])
1236 cmd += " --keyring %s --keyserver %s --recv-key %s" \
1237 % (keyring, keyserver, fingerprint)
1238 (result, output) = commands.getstatusoutput(cmd)
1240 return "'%s' failed with exit code %s" % (cmd, result)
1244 ################################################################################
# Build the "--keyring X --keyring Y ..." argument string for gpg/gpgv,
# defaulting to the configured keyrings.  NOTE(review): the guard selecting
# the default is elided in this view.
1246 def gpg_keyring_args(keyrings=None):
1248 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1250 return " ".join(["--keyring %s" % x for x in keyrings])
1252 ################################################################################
def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None):
    """
    Check the signature of a file and return the fingerprint if the
    signature is valid, or 'None' if it's not.

    @type sig_filename: string
    @param sig_filename: the filename whose signature should be checked

    @type data_filename: string
    @param data_filename: name of the file a detached signature applies to
        (optional)

    @type keyrings: list of strings
    @param keyrings: keyrings to use (optional)

    @type autofetch: None, True or False
    @param autofetch: whether to fetch an unknown signing key from a
        keyserver; None means use the Dinstall::KeyAutoFetch config default

    @return: a tuple (fingerprint, rejects); fingerprint is None when the
        signature could not be validated, rejects is a list of
        human-readable rejection messages

    NOTE(review): this chunk looks truncated -- the 'rejects' accumulator
    is used before any visible initialization, and several
    append-then-return sequences (after retrieve_key, after
    process_gpgv_output, after the gpgv exit-status messages) lack the
    guard conditions one would expect.  Confirm against the full source.
    """
    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(sig_filename):
        rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
        return (None, rejects)

    if data_filename and not re_taint_free.match(data_filename):
        rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
        return (None, rejects)

    # NOTE(review): this clobbers any caller-supplied 'keyrings';
    # presumably it should only be a fallback when none were given -- confirm.
    keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    # Autofetch the signing key if that's enabled
    if autofetch == None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    # A non-empty return value from retrieve_key() is an error message.
    error_msg = retrieve_key(sig_filename)
    rejects.append(error_msg)
    return (None, rejects)

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    rejects.append("internal error while performing signature check on %s." % (sig_filename))
    # NOTE(review): list.append() takes exactly one argument; these
    # two-argument calls look like leftovers from an older
    # reject(message, prefix) callback API and would raise TypeError
    # if reached -- confirm and fix upstream.
    rejects.append(internal_error, "")
    rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.", "")
    return (None, rejects)

    # Now check for obviously bad things in the processed output
    if keywords.has_key("KEYREVOKED"):
        rejects.append("The key used to sign %s has been revoked." % (sig_filename))
    if keywords.has_key("BADSIG"):
        rejects.append("bad signature on %s." % (sig_filename))
    if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
        rejects.append("failed to check signature on %s." % (sig_filename))
    if keywords.has_key("NO_PUBKEY"):
        # The status-fd arguments for NO_PUBKEY carry the missing key id.
        # NOTE(review): 'key' is used below without a visible binding from
        # 'args' -- presumably elided in this chunk.
        args = keywords["NO_PUBKEY"]
        rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if keywords.has_key("BADARMOR"):
        rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if keywords.has_key("NODATA"):
        rejects.append("no signature found in %s." % (sig_filename))
    if keywords.has_key("EXPKEYSIG"):
        args = keywords["EXPKEYSIG"]
        rejects.append("Signature made by expired key 0x%s" % (key))
    if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
        args = keywords["KEYEXPIRED"]
        # Render the expiry timestamp human-readably: a value without a
        # 'T' is treated as a numeric epoch and formatted as YYYY-MM-DD;
        # otherwise the raw timestamp string is used.
        # NOTE(review): 'timestamp' binding and the try/else branches
        # around these assignments appear elided -- confirm.
        if timestamp.count("T") == 0:
            expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
        expiredate = "unknown (%s)" % (timestamp)
        expiredate = timestamp
        rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))

    if len(rejects) > 0:
        return (None, rejects)

    # Next check gpgv exited with a zero return code
    rejects.append("gpgv failed while checking %s." % (sig_filename))
    # Include gpgv's own output to make the rejection actionable.
    rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "))
    rejects.append(prefix_multi_line_string(output, " [GPG output:] "))
    return (None, rejects)

    # Sanity check the good stuff we expect
    if not keywords.has_key("VALIDSIG"):
        rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
    args = keywords["VALIDSIG"]
    rejects.append("internal error while checking signature on %s." % (sig_filename))
    # First VALIDSIG argument is the signing key's fingerprint.
    fingerprint = args[0]
    if not keywords.has_key("GOODSIG"):
        rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if not keywords.has_key("SIG_ID"):
        rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

    # Finally ensure there's not something we don't recognise
    known_keywords = dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
                          NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="",POLICY_URL="")

    for keyword in keywords.keys():
        if not known_keywords.has_key(keyword):
            rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))

    if len(rejects) > 0:
        return (None, rejects)

    return (fingerprint, [])
1386 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """
    Retrieve email addresses from gpg key uids for a given fingerprint.

    Results are memoized in the module-level key_uid_email_cache.

    @type fingerprint: string
    @param fingerprint: key fingerprint to look up

    @rtype: set of strings
    @return: the email addresses found in the key's uids (possibly empty)
    """
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses is not None:
        # Cache hit: return the previously extracted address set.
        return addresses
    addresses = set()
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
          % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    # Only parse the output when gpg succeeded; otherwise cache an
    # empty set rather than re-running gpg on every call.
    if result == 0:
        for l in output.split('\n'):
            m = re_gpg_uid.match(l)
            if m is not None:
                addresses.add(m.group(1))
    key_uid_email_cache[fingerprint] = addresses
    return addresses
1405 ################################################################################
1407 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """
    Word-wrap 'paragraph' to at most 'max_length' characters per line,
    prefixing continuation lines with 'prefix'.

    NOTE(review): this chunk looks truncated -- 's' and 'line' are used
    without visible initialization, the per-word loop header and the
    return statement are not visible.  Confirm against the full source.
    """
    # Split the paragraph on whitespace into individual words.
    words = paragraph.split()
    word_size = len(word)
    # A single word longer than the limit gets a line of its own.
    if word_size > max_length:
        s += line + '\n' + prefix
        s += word + '\n' + prefix
    # +1 accounts for the joining space between 'line' and 'word'.
    new_length = len(line) + word_size + 1
    if new_length > max_length:
        s += line + '\n' + prefix
1438 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.

    Both paths have the leading 'root' stripped, then the stripped 'src'
    is prefixed with one '../' per directory component of the directory
    containing the stripped 'dest'.  Returns the fixed 'src'.
    """
    stripped_src = src.replace(root, '', 1)
    link_dir = os.path.dirname(dest.replace(root, '', 1))
    hops = len(link_dir.split('/'))
    return ('../' * hops) + stripped_src
1451 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.

    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pair (fd, name).
    """
    # Delegate to the standard library, which creates the file securely.
    (fd, pathname) = tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=directory)
    return (fd, pathname)
1465 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Return a secure and unique directory by pre-creating it.

    If 'parent' is non-null, it will be the directory the directory is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pathname to the new directory.
    """
    # Delegate to the standard library, which creates the directory securely.
    return tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=parent)
1479 ################################################################################
def is_email_alias(email):
    """
    Check whether the user part of the email is listed in the alias file.

    The alias file is parsed once and cached in the module-level
    'alias_cache' set; subsequent calls are pure lookups.

    @type email: string
    @param email: address to check (only the part before '@' is used)

    @rtype: boolean
    @return: True if the local part appears in the alias file
    """
    # Declare the module-level cache as global so the first call's
    # initialization persists; previously the cache was read while still
    # None, so .add() would have raised AttributeError.
    global alias_cache
    if alias_cache == None:
        aliasfn = which_alias_file()
        alias_cache = set()
        if aliasfn:
            for l in open(aliasfn):
                # Alias file lines look like "name: target"; the local
                # part is everything before the first colon.
                alias_cache.add(l.split(':')[0])
    uid = email.split('@')[0]
    return uid in alias_cache
1493 ################################################################################
def get_changes_files(from_dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place)

    Returns a list of filenames
    """
    try:
        # Much of the rest of p-u/p-a depends on being in the right place
        os.chdir(from_dir)
        changes_files = [x for x in os.listdir(from_dir) if x.endswith('.changes')]
    except OSError as e:
        # fubar() reports the error; previously 'e' was referenced
        # without ever being bound and no try/except guarded the listing.
        fubar("Failed to read list from directory %s (%s)" % (from_dir, e))

    return changes_files
1512 ################################################################################
# Module-level configuration setup: create a fresh apt configuration
# object and populate it from the dak config file(s).
# NOTE(review): this chunk looks truncated -- the apt_pkg.init() call
# that normally precedes this appears to be elided; confirm.
Cnf = apt_pkg.newConfiguration()
# Skip the default config when running under the test harness
# (DAK_TEST set in the environment).
if not os.getenv("DAK_TEST"):
    apt_pkg.ReadConfigFileISC(Cnf,default_config)

# Additionally read a host-specific config file when which_conf_file()
# points somewhere other than the default.
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())