2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
41 import email as modemail
42 from dak_exceptions import *
43 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
44 re_multi_line_field, re_srchasver, re_verwithext, \
45 re_parse_maintainer, re_taint_free, re_gpg_uid, re_re_mark
47 ################################################################################
# Host-specific configuration file locations.
49 default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
50 default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used
# Module-level caches, filled lazily by the alias/uid lookup helpers.
52 alias_cache = None #: Cache for email alias checks
53 key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
55 # (hashname, function, earliest_changes_version)
56 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
57 ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
59 ################################################################################
62 """ Escape html chars """
# Replace each HTML-significant character using the html_escaping map
# keyed by the matched character.
63 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
65 ################################################################################
67 def open_file(filename, mode='r'):
69 Open C{file}, return fileobject.
71 @type filename: string
72 @param filename: path/filename to open
75 @param mode: open mode
78 @return: open fileobject
80 @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
84 f = open(filename, mode)
# Translate IOError into dak's own exception so callers only need to
# catch CantOpenError.
86 raise CantOpenError, filename
89 ################################################################################
91 def our_raw_input(prompt=""):
# raw_input() replacement: write the prompt, read a line, and treat
# EOF (^D) as a user interrupt.
93 sys.stdout.write(prompt)
99 sys.stderr.write("\nUser interrupt (^D).\n")
102 ################################################################################
104 def extract_component_from_section(section):
# Split a "component/section" value (e.g. "non-free/libs") into its
# parts, expanding any alias configured under Component::<section>.
107 if section.find('/') != -1:
108 component = section.split('/')[0]
110 # Expand default component
112 if Cnf.has_key("Component::%s" % section):
117 return (section, component)
119 ################################################################################
121 def parse_deb822(contents, signing_rules=0):
# Parse RFC822-style (deb822) content into a field -> value dict.
# signing_rules: -1 = no signature required, 0 = signature required,
# 1 = strict dsc-style checking (see parse_changes' docstring).
125 # Split the lines in the input, keeping the linebreaks.
126 lines = contents.splitlines(True)
129 raise ParseChangesError, "[Empty changes file]"
131 # Reindex by line number so we can easily verify the format of
137 indexed_lines[index] = line[:-1]
141 num_of_lines = len(indexed_lines.keys())
144 while index < num_of_lines:
146 line = indexed_lines[index]
148 if signing_rules == 1:
# In strict mode the signed data must be followed directly by the PGP
# signature block; anything else is an InvalidDscError.
150 if index > num_of_lines:
151 raise InvalidDscError, index
152 line = indexed_lines[index]
153 if not line.startswith("-----BEGIN PGP SIGNATURE"):
154 raise InvalidDscError, index
159 if line.startswith("-----BEGIN PGP SIGNATURE"):
161 if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
163 if signing_rules == 1:
# Strict mode: skip the PGP armor header data up to the blank line.
164 while index < num_of_lines and line != "":
166 line = indexed_lines[index]
168 # If we're not inside the signed data, don't process anything
169 if signing_rules >= 0 and not inside_signature:
171 slf = re_single_line_field.match(line)
173 field = slf.groups()[0].lower()
174 changes[field] = slf.groups()[1]
178 changes[field] += '\n'
# Continuation lines extend the most recently seen field.
180 mlf = re_multi_line_field.match(line)
183 raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
184 if first == 1 and changes[field] != "":
185 changes[field] += '\n'
187 changes[field] += mlf.groups()[0] + '\n'
# An unterminated signature in strict mode is an error.
191 if signing_rules == 1 and inside_signature:
192 raise InvalidDscError, index
# Keep the raw text around too; ensure_hashes() re-parses it later.
194 changes["filecontents"] = "".join(lines)
196 if changes.has_key("source"):
197 # Strip the source version in brackets from the source field,
198 # put it in the "source-version" field instead.
199 srcver = re_srchasver.search(changes["source"])
201 changes["source"] = srcver.group(1)
202 changes["source-version"] = srcver.group(2)
205 raise ParseChangesError, error
209 ################################################################################
211 def parse_changes(filename, signing_rules=0):
213 Parses a changes file and returns a dictionary where each field is a
214 key. The mandatory first argument is the filename of the .changes
217 signing_rules is an optional argument:
219 - If signing_rules == -1, no signature is required.
220 - If signing_rules == 0 (the default), a signature is required.
221 - If signing_rules == 1, it turns on the same strict format checking
224 The rules for (signing_rules == 1)-mode are:
226 - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
227 followed by any PGP header data and must end with a blank line.
229 - The data section must end with a blank line and must be followed by
230 "-----BEGIN PGP SIGNATURE-----".
233 changes_in = open_file(filename)
234 content = changes_in.read()
# All actual parsing is delegated to parse_deb822().
236 return parse_deb822(content, signing_rules)
238 ################################################################################
def hash_key(hashname):
    """Return the files-dict key used to store the given hash, e.g. "md5" -> "md5sum"."""
    return hashname + "sum"
243 ################################################################################
245 def create_hash(where, files, hashname, hashfunc):
247 create_hash extends the passed files dict with the given hash by
248 iterating over all files on disk and passing them to the hashing
# Files that cannot be opened are reported via the returned reject
# list instead of raising.
253 for f in files.keys():
255 file_handle = open_file(f)
256 except CantOpenError:
257 rejmsg.append("Could not open file %s for checksumming" % (f))
# Store the digest under e.g. "sha1sum" (see hash_key()).
259 files[f][hash_key(hashname)] = hashfunc(file_handle)
264 ################################################################################
266 def check_hash(where, files, hashname, hashfunc):
268 check_hash checks the given hash in the files dict against the actual
269 files on disk. The hash values need to be present consistently in
270 all file entries. It does not modify its input in any way.
274 for f in files.keys():
278 file_handle = open_file(f)
280 # Check for the hash entry, to not trigger a KeyError.
281 if not files[f].has_key(hash_key(hashname)):
282 rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
286 # Actually check the hash for correctness.
287 if hashfunc(file_handle) != files[f][hash_key(hashname)]:
288 rejmsg.append("%s: %s check failed in %s" % (f, hashname,
290 except CantOpenError:
291 # TODO: This happens when the file is in the pool.
292 # warn("Cannot open file %s" % f)
# NOTE(review): unopenable files appear to be skipped silently here
# (pool files are not on the local path) — confirm this is intended.
299 ################################################################################
301 def check_size(where, files):
303 check_size checks the file sizes in the passed files dict against the
# Returns a list of reject messages; 'where' only labels the messages.
308 for f in files.keys():
313 # TODO: This happens when the file is in the pool.
# Compare the on-disk size (from os.stat) against the declared one.
317 actual_size = entry[stat.ST_SIZE]
318 size = int(files[f]["size"])
319 if size != actual_size:
320 rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
321 % (f, actual_size, size, where))
324 ################################################################################
326 def check_hash_fields(what, manifest):
328 check_hash_fields ensures that there are no checksum fields in the
329 given dict that we do not know about.
# Collect the accepted hash names from the known_hashes table and
# reject any "checksums-*" field naming a different algorithm.
333 hashes = map(lambda x: x[0], known_hashes)
334 for field in manifest:
335 if field.startswith("checksums-"):
336 hashname = field.split("-",1)[1]
337 if hashname not in hashes:
338 rejmsg.append("Unsupported checksum field for %s "\
339 "in %s" % (hashname, what))
342 ################################################################################
344 def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
# Make sure every file has the named checksum: imported from the
# .changes when its format version carries the field, computed from
# disk otherwise.  Returns a list of reject messages.
345 if format >= version:
346 # The version should contain the specified hash.
349 # Import hashes from the changes
350 rejmsg = parse_checksums(".changes", files, changes, hashname)
354 # We need to calculate the hash because it can't possibly
357 return func(".changes", files, hashname, hashfunc)
359 # We could add the orig which might be in the pool to the files dict to
360 # access the checksums easily.
362 def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
364 ensure_dsc_hashes' task is to ensure that each and every *present* hash
365 in the dsc is correct, i.e. identical to the changes file and if necessary
366 the pool. The latter task is delegated to check_hash.
# A .dsc that does not carry this Checksums-* field is left alone.
370 if not dsc.has_key('Checksums-%s' % (hashname,)):
372 # Import hashes from the dsc
373 parse_checksums(".dsc", dsc_files, dsc, hashname)
375 rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
378 ################################################################################
380 def ensure_hashes(changes, dsc, files, dsc_files):
# Validate/compute every known checksum for a .changes upload (and its
# .dsc when source is included); returns the accumulated reject list.
383 # Make sure we recognise the format of the Files: field in the .changes
384 format = changes.get("format", "0.0").split(".", 1)
386 format = int(format[0]), int(format[1])
388 format = int(float(format[0])), 0
390 # We need to deal with the original changes blob, as the fields we need
391 # might not be in the changes dict serialised into the .dak anymore.
392 orig_changes = parse_deb822(changes['filecontents'])
394 # Copy the checksums over to the current changes dict. This will keep
395 # the existing modifications to it intact.
396 for field in orig_changes:
397 if field.startswith('checksums-'):
398 changes[field] = orig_changes[field]
400 # Check for unsupported hashes
401 rejmsg.extend(check_hash_fields(".changes", changes))
402 rejmsg.extend(check_hash_fields(".dsc", dsc))
404 # We have to calculate the hash if we have an earlier changes version than
405 # the hash appears in rather than require it exist in the changes file
406 for hashname, hashfunc, version in known_hashes:
407 rejmsg.extend(_ensure_changes_hash(changes, format, version, files,
# The .dsc only needs checking when the upload includes source.
409 if "source" in changes["architecture"]:
410 rejmsg.extend(_ensure_dsc_hash(dsc, dsc_files, hashname,
def parse_checksums(where, files, manifest, hashname):
    """
    Import the Checksums-<hashname> field from 'manifest' (a parsed
    .changes/.dsc dict) into 'files' and cross-check it.

    Each checksum line is "checksum size filename".  Sizes must agree
    (as strings) with the "size" entries already present in 'files',
    and every file in 'files' must end up with a checksum entry.
    Returns a list of reject messages; 'where' (e.g. ".changes") is
    only used to label those messages.
    """
    rejmsg = []
    field = 'checksums-%s' % hashname
    if not field in manifest:
        return rejmsg
    for line in manifest[field].split('\n'):
        if not line:
            break
        checksum, size, checkfile = line.strip().split(' ')
        if not files.has_key(checkfile):
            # TODO: check for the file's entry in the original files dict, not
            # the one modified by (auto)byhand and other weird stuff
            # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
            #    (file, hashname, where))
            continue
        # Sizes are compared as strings, exactly as they appear in the field.
        if not files[checkfile]["size"] == size:
            rejmsg.append("%s: size differs for files and checksums-%s entry "\
                "in %s" % (checkfile, hashname, where))
        files[checkfile][hash_key(hashname)] = checksum
    for f in files.keys():
        if not files[f].has_key(hash_key(hashname)):
            # Bug fix: report the file that is actually missing the entry
            # (f), not whichever filename the last checksum line mentioned.
            rejmsg.append("%s: no entry in checksums-%s in %s" % (f,
                hashname, where))
    return rejmsg
441 ################################################################################
443 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
445 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
# Turn the Files:/Checksums-* field of a parsed .changes/.dsc into a
# dict keyed by filename with size/section/priority/component entries.
448 # Make sure we have a Files: field to parse...
449 if not changes.has_key(field):
450 raise NoFilesFieldError
452 # Make sure we recognise the format of the Files: field
453 format = re_verwithext.search(changes.get("format", "0.0"))
455 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
457 format = format.groups()
458 if format[1] == None:
459 format = int(float(format[0])), 0, format[2]
461 format = int(format[0]), int(format[1]), format[2]
462 if format[2] == None:
466 # format = (1,0) are the only formats we currently accept,
467 # format = (0,0) are missing format headers of which we still
468 # have some in the archive.
469 if format != (1,0) and format != (0,0):
470 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
# .dsc files (and checksum fields) have their own accepted range.
472 if (format < (1,5) or format > (1,8)):
473 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
474 if field != "files" and format < (1,8):
475 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
# Only binary .changes "Files:" entries carry section/priority columns.
477 includes_section = (not is_a_dsc) and field == "files"
479 # Parse each entry/line:
480 for i in changes[field].split('\n'):
484 section = priority = ""
487 (md5, size, section, priority, name) = s
489 (md5, size, name) = s
491 raise ParseChangesError, i
498 (section, component) = extract_component_from_section(section)
500 files[name] = Dict(size=size, section=section,
501 priority=priority, component=component)
502 files[name][hashname] = md5
506 ################################################################################
508 def force_to_utf8(s):
510 Forces a string to UTF-8. If the string isn't already UTF-8,
511 it's assumed to be ISO-8859-1.
# Fallback path: decode as latin-1 and re-encode as UTF-8.
517 latin1_s = unicode(s,'iso8859-1')
518 return latin1_s.encode('utf-8')
520 def rfc2047_encode(s):
522 Encodes a (header) string per RFC2047 if necessary. If the
523 string is neither ASCII nor UTF-8, it's assumed to be ISO-8859-1.
# codecs.lookup(...)[1] is the codec's decode function; calling it just
# probes whether 's' is valid in that encoding (it raises otherwise),
# driving the ascii -> utf-8 -> iso-8859-1 fallback chain.
526 codecs.lookup('ascii')[1](s)
531 codecs.lookup('utf-8')[1](s)
532 h = email.Header.Header(s, 'utf-8', 998)
535 h = email.Header.Header(s, 'iso-8859-1', 998)
538 ################################################################################
540 # <Culus> 'The standard sucks, but my tool is supposed to interoperate
541 # with it. I know - I'll fix the suckage and make things
544 def fix_maintainer (maintainer):
546 Parses a Maintainer or Changed-By field and returns:
547 1. an RFC822 compatible version,
548 2. an RFC2047 compatible version,
552 The name is forced to UTF-8 for both 1. and 3.. If the name field
553 contains '.' or ',' (as allowed by Debian policy), 1. and 2. are
554 switched to 'email (name)' format.
557 maintainer = maintainer.strip()
# Empty input yields four empty strings rather than an error.
559 return ('', '', '', '')
# Bare address forms: "addr" or "<addr>" carry no name part.
561 if maintainer.find("<") == -1:
564 elif (maintainer[0] == "<" and maintainer[-1:] == ">"):
565 email = maintainer[1:-1]
568 m = re_parse_maintainer.match(maintainer)
570 raise ParseMaintError, "Doesn't parse as a valid Maintainer field."
574 # Get an RFC2047 compliant version of the name
575 rfc2047_name = rfc2047_encode(name)
577 # Force the name to be UTF-8
578 name = force_to_utf8(name)
# Names containing '.' or ',' must use the "email (name)" form (see
# the docstring above).
580 if name.find(',') != -1 or name.find('.') != -1:
581 rfc822_maint = "%s (%s)" % (email, name)
582 rfc2047_maint = "%s (%s)" % (email, rfc2047_name)
584 rfc822_maint = "%s <%s>" % (name, email)
585 rfc2047_maint = "%s <%s>" % (rfc2047_name, email)
# "buildd_*" pseudo-addresses are exempt from the '@' sanity check.
587 if email.find("@") == -1 and email.find("buildd_") != 0:
588 raise ParseMaintError, "No @ found in email address part."
590 return (rfc822_maint, rfc2047_maint, name, email)
592 ################################################################################
594 def send_mail (message, filename=""):
595 """sendmail wrapper, takes _either_ a message string or a file as arguments"""
597 # If we've been passed a string dump it into a temporary file
599 (fd, filename) = tempfile.mkstemp()
600 os.write (fd, message)
# Optional recipient filtering: keep only addresses matching
# Dinstall::MailWhiteList, rewriting/removing To/Bcc/Cc headers.
603 if Cnf.has_key("Dinstall::MailWhiteList") and \
604 Cnf["Dinstall::MailWhiteList"] != "":
605 message_in = open_file(filename)
606 message_raw = modemail.message_from_file(message_in)
610 whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
612 for line in whitelist_in:
# Lines carrying the RE marker are compiled as regexes, everything
# else is matched literally (re.escape).
613 if re_re_mark.match(line):
614 whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
616 whitelist.append(re.compile(re.escape(line.strip())))
621 fields = ["To", "Bcc", "Cc"]
624 value = message_raw.get(field, None)
627 for item in value.split(","):
628 (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
634 if not mail_whitelisted:
635 print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])
639 # Doesn't have any mail in whitelist so remove the header
641 del message_raw[field]
643 message_raw.replace_header(field, string.join(match, ", "))
645 # Change message fields in order if we don't have a To header
646 if not message_raw.has_key("To"):
649 if message_raw.has_key(field):
650 message_raw[fields[-1]] = message_raw[field]
651 del message_raw[field]
654 # Clean up any temporary files
655 # and return, as we removed all recipients.
657 os.unlink (filename);
# Write the filtered message back to the file before sending.
660 fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
661 os.write (fd, message_raw.as_string(True));
# Hand the file to the configured sendmail command.
665 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
667 raise SendmailFailedError, output
669 # Clean up any temporary files
675 def poolify (source, component):
# Return the pool subdirectory for a source package: "lib*" packages
# are bucketed by their first four characters, everything else by the
# first character (e.g. "liba/libapt/", "d/dpkg/").
# NOTE(review): assumes 'component' already carries its trailing '/'
# (or is empty) at this point — confirm against the elided lines above.
678 if source[:3] == "lib":
679 return component + source[:4] + '/' + source + '/'
681 return component + source[:1] + '/' + source + '/'
683 ################################################################################
685 def move (src, dest, overwrite = 0, perms = 0664):
# Move 'src' to 'dest', creating the destination directory (mode
# 02775) if needed.  Conflicts are fatal (fubar) unless 'overwrite'
# is set; contrast with copy(), which raises exceptions instead.
686 if os.path.exists(dest) and os.path.isdir(dest):
689 dest_dir = os.path.dirname(dest)
690 if not os.path.exists(dest_dir):
# Temporarily clear the umask so makedirs gets the exact mode.
691 umask = os.umask(00000)
692 os.makedirs(dest_dir, 02775)
694 #print "Moving %s to %s..." % (src, dest)
695 if os.path.exists(dest) and os.path.isdir(dest):
696 dest += '/' + os.path.basename(src)
697 # Don't overwrite unless forced to
698 if os.path.exists(dest):
700 fubar("Can't move %s to %s - file already exists." % (src, dest))
702 if not os.access(dest, os.W_OK):
703 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
704 shutil.copy2(src, dest)
705 os.chmod(dest, perms)
708 def copy (src, dest, overwrite = 0, perms = 0664):
# Copy 'src' to 'dest', creating the destination directory (mode
# 02775) if needed.  Same shape as move() but conflicts raise
# FileExistsError/CantOverwriteError instead of aborting via fubar().
709 if os.path.exists(dest) and os.path.isdir(dest):
712 dest_dir = os.path.dirname(dest)
713 if not os.path.exists(dest_dir):
714 umask = os.umask(00000)
715 os.makedirs(dest_dir, 02775)
717 #print "Copying %s to %s..." % (src, dest)
718 if os.path.exists(dest) and os.path.isdir(dest):
719 dest += '/' + os.path.basename(src)
720 # Don't overwrite unless forced to
721 if os.path.exists(dest):
723 raise FileExistsError
725 if not os.access(dest, os.W_OK):
726 raise CantOverwriteError
727 shutil.copy2(src, dest)
728 os.chmod(dest, perms)
730 ################################################################################
# Look up this host's database hostname from the Config::<host> tree.
# NOTE(review): the enclosing def line is not visible here.
733 res = socket.gethostbyaddr(socket.gethostname())
734 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
735 if database_hostname:
736 return database_hostname
740 def which_conf_file ():
# Return the host-specific dak config if one is configured under
# Config::<host>::DakConfig, otherwise the compiled-in default.
741 res = socket.gethostbyaddr(socket.gethostname())
742 if Cnf.get("Config::" + res[0] + "::DakConfig"):
743 return Cnf["Config::" + res[0] + "::DakConfig"]
745 return default_config
747 def which_apt_conf_file ():
# Same lookup as which_conf_file(), but for the apt configuration.
748 res = socket.gethostbyaddr(socket.gethostname())
749 if Cnf.get("Config::" + res[0] + "::AptConfig"):
750 return Cnf["Config::" + res[0] + "::AptConfig"]
752 return default_apt_config
754 def which_alias_file():
# Return the host's forward-alias file path if it exists.
755 hostname = socket.gethostbyaddr(socket.gethostname())[0]
756 aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
757 if os.path.exists(aliasfn):
762 ################################################################################
764 # Escape characters which have meaning to SQL's regex comparison operator ('~')
765 # (woefully incomplete)
768 s = s.replace('+', '\\\\+')
# Double-escaped so one backslash survives Python *and* SQL parsing.
769 s = s.replace('.', '\\\\.')
772 ################################################################################
774 def TemplateSubst(map, filename):
775 """ Perform a substition of template """
# Read the template file and replace each key of 'map' occurring in
# the text with its value.
776 templatefile = open_file(filename)
777 template = templatefile.read()
779 template = template.replace(x,map[x])
783 ################################################################################
785 def fubar(msg, exit_code=1):
# Fatal error helper: report on stderr with an "E: " prefix and exit.
786 sys.stderr.write("E: %s\n" % (msg))
# warn(): non-fatal counterpart using a "W: " prefix.
790 sys.stderr.write("W: %s\n" % (msg))
792 ################################################################################
794 # Returns the user name with a laughable attempt at rfc822 conformancy
795 # (read: removing stray periods).
# Takes the GECOS full-name field of the current user and strips '.'.
797 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
799 ################################################################################
809 return ("%d%s" % (c, t))
811 ################################################################################
813 def cc_fix_changes (changes):
# Canonicalise the Architecture field in place: replace the
# space-separated string with a dict mapping each arch name to 1.
814 o = changes.get("architecture", "")
816 del changes["architecture"]
817 changes["architecture"] = {}
819 changes["architecture"][j] = 1
821 def changes_compare (a, b):
822 """ Sort by source name, source version, 'have source', and then by filename """
824 a_changes = parse_changes(a)
829 b_changes = parse_changes(b)
833 cc_fix_changes (a_changes)
834 cc_fix_changes (b_changes)
836 # Sort by source name
837 a_source = a_changes.get("source")
838 b_source = b_changes.get("source")
839 q = cmp (a_source, b_source)
843 # Sort by source version
844 a_version = a_changes.get("version", "0")
845 b_version = b_changes.get("version", "0")
846 q = apt_pkg.VersionCompare(a_version, b_version)
850 # Sort by 'have source'
# Uploads that include source sort before binary-only ones.
851 a_has_source = a_changes["architecture"].get("source")
852 b_has_source = b_changes["architecture"].get("source")
853 if a_has_source and not b_has_source:
855 elif b_has_source and not a_has_source:
858 # Fall back to sort by filename
861 ################################################################################
863 def find_next_free (dest, too_many=100):
# Find a free filename by appending ".1", ".2", ... to 'dest'; give up
# with NoFreeFilenameError after 'too_many' attempts.
866 while os.path.exists(dest) and extra < too_many:
867 dest = orig_dest + '.' + repr(extra)
869 if extra >= too_many:
870 raise NoFreeFilenameError
873 ################################################################################
875 def result_join (original, sep = '\t'):
# Join a query-result row with 'sep', rendering None entries as "".
877 for i in xrange(len(original)):
878 if original[i] == None:
879 resultlist.append("")
881 resultlist.append(original[i])
882 return sep.join(resultlist)
884 ################################################################################
886 def prefix_multi_line_string(str, prefix, include_blank_lines=0):
# Prefix every line of 'str' with 'prefix'; blank lines are dropped
# unless include_blank_lines is set.
888 for line in str.split('\n'):
890 if line or include_blank_lines:
891 out += "%s%s\n" % (prefix, line)
892 # Strip trailing new line
897 ################################################################################
899 def validate_changes_file_arg(filename, require_changes=1):
901 'filename' is either a .changes or .dak file. If 'filename' is a
902 .dak file, it's changed to be the corresponding .changes file. The
903 function then checks if the .changes file a) exists and b) is
904 readable and returns the .changes filename if so. If there's a
905 problem, the next action depends on the option 'require_changes'
908 - If 'require_changes' == -1, errors are ignored and the .changes
909 filename is returned.
910 - If 'require_changes' == 0, a warning is given and 'None' is returned.
911 - If 'require_changes' == 1, a fatal error is raised.
916 orig_filename = filename
917 if filename.endswith(".dak"):
918 filename = filename[:-4]+".changes"
920 if not filename.endswith(".changes"):
921 error = "invalid file type; not a changes file"
923 if not os.access(filename,os.R_OK):
# Distinguish unreadable-but-present from missing for the error text.
924 if os.path.exists(filename):
925 error = "permission denied"
927 error = "file not found"
930 if require_changes == 1:
931 fubar("%s: %s." % (orig_filename, error))
932 elif require_changes == 0:
933 warn("Skipping %s - %s" % (orig_filename, error))
935 else: # We only care about the .dak file
940 ################################################################################
# True for "real" (binary) architectures, i.e. not "source" or "all".
943 return (arch != "source" and arch != "all")
945 ################################################################################
def join_with_commas_and(list):
    """Render a list as an English enumeration: "a, b and c" ("nothing" if empty)."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    return "%s and %s" % (", ".join(list[:-1]), list[-1])
952 ################################################################################
# Pretty-print a dependency list: each (pkg, version, constraint) atom
# becomes "pkg (constraint version)", alternatives joined with " |".
957 (pkg, version, constraint) = atom
959 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
962 pp_deps.append(pp_dep)
963 return " |".join(pp_deps)
965 ################################################################################
970 ################################################################################
972 def parse_args(Options):
973 """ Handle -a, -c and -s arguments; returns them as SQL constraints """
# Process suite: unknown suites are warned about and skipped.
977 for suite in split_args(Options["Suite"]):
978 suite_id = database.get_suite_id(suite)
980 warn("suite '%s' not recognised." % (suite))
982 suite_ids_list.append(suite_id)
984 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
986 fubar("No valid suite given.")
# Process component (same warn-and-skip pattern as suites).
991 if Options["Component"]:
992 component_ids_list = []
993 for component in split_args(Options["Component"]):
994 component_id = database.get_component_id(component)
995 if component_id == -1:
996 warn("component '%s' not recognised." % (component))
998 component_ids_list.append(component_id)
999 if component_ids_list:
1000 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
1002 fubar("No valid component given.")
1006 # Process architecture
1007 con_architectures = ""
1008 if Options["Architecture"]:
1011 for architecture in split_args(Options["Architecture"]):
# "source" has no architecture id; it is tracked via check_source.
1012 if architecture == "source":
1015 architecture_id = database.get_architecture_id(architecture)
1016 if architecture_id == -1:
1017 warn("architecture '%s' not recognised." % (architecture))
1019 arch_ids_list.append(architecture_id)
1021 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
1023 if not check_source:
1024 fubar("No valid architecture given.")
1028 return (con_suites, con_architectures, con_components, check_source)
1030 ################################################################################
1032 # Inspired(tm) by Bryn Keller's print_exc_plus (See
1033 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
# Walk to the innermost traceback frame, print the standard traceback,
# then dump each frame's local variables for post-mortem debugging.
1036 tb = sys.exc_info()[2]
1043 frame = frame.f_back
1045 traceback.print_exc()
1047 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
1048 frame.f_code.co_filename,
1050 for key, value in frame.f_locals.items():
# repr() of a local can itself raise; fall back to a placeholder.
1051 print "\t%20s = " % key,
1055 print "<unable to print>"
1057 ################################################################################
1059 def try_with_debug(function):
1067 ################################################################################
1069 def arch_compare_sw (a, b):
1071 Function for use in sorting lists of architectures.
1073 Sorts normally except that 'source' dominates all others.
# Two "source" values compare equal; per the docstring, "source"
# sorts ahead of every other architecture.
1076 if a == "source" and b == "source":
1087 def split_args (s, dwim=1):
1089 Split command line arguments which can be separated by either commas
1090 or whitespace. If dwim is set, it will complain about string ending
1091 in comma since this usually means someone did 'dak ls -a i386, m68k
1092 foo' or something and the inevitable confusion resulting from 'm68k'
1093 being treated as an argument is undesirable.
# Comma-free strings are treated as whitespace-separated.
1096 if s.find(",") == -1:
# A trailing comma is almost always a typo; abort rather than let a
# stray token be misinterpreted as an argument.
1099 if s[-1:] == "," and dwim:
1100 fubar("split_args: found trailing comma, spurious space maybe?")
1103 ################################################################################
def Dict(**kwds):
    """Return the keyword arguments as a plain dictionary (convenience constructor)."""
    # The kwargs name avoids shadowing the builtin ``dict``; it is not
    # visible to callers, so the interface is unchanged.
    return kwds
1107 ########################################
1109 def gpgv_get_status_output(cmd, status_read, status_write):
1111 Our very own version of commands.getouputstatus(), hacked to support
# Runs 'cmd' under /bin/sh while leaving the status_write fd open for
# the child (gpgv's --status-fd); collects stdout/stderr and the
# status stream separately.  Returns (output, status, exit_status).
1115 cmd = ['/bin/sh', '-c', cmd]
1116 p2cread, p2cwrite = os.pipe()
1117 c2pread, c2pwrite = os.pipe()
1118 errout, errin = os.pipe()
# Child side: close every inherited fd except the status pipe before exec.
1128 for i in range(3, 256):
1129 if i != status_write:
1135 os.execvp(cmd[0], cmd)
# Parent side: multiplex the read ends with select() until drained.
1141 os.dup2(c2pread, c2pwrite)
1142 os.dup2(errout, errin)
1144 output = status = ""
1146 i, o, e = select.select([c2pwrite, errin, status_read], [], [])
1149 r = os.read(fd, 8196)
1151 more_data.append(fd)
1152 if fd == c2pwrite or fd == errin:
1154 elif fd == status_read:
1157 fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
1159 pid, exit_status = os.waitpid(pid, 0)
1161 os.close(status_write)
1162 os.close(status_read)
1172 return output, status, exit_status
1174 ################################################################################
1176 def process_gpgv_output(status):
1177 # Process the status-fd output
# Parse "[GNUPG:] KEYWORD args..." lines into a keyword -> args dict;
# malformed lines are accumulated into 'internal_error'.  Returns
# (keywords, internal_error).
1180 for line in status.split('\n'):
1184 split = line.split()
1186 internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
1188 (gnupg, keyword) = split[:2]
1189 if gnupg != "[GNUPG:]":
1190 internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
# A few status tokens may legitimately repeat; any other duplicate is
# an internal error.
1193 if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
1194 internal_error += "found duplicate status token ('%s').\n" % (keyword)
1197 keywords[keyword] = args
1199 return (keywords, internal_error)
1201 ################################################################################
1203 def retrieve_key (filename, keyserver=None, keyring=None):
1205 Retrieve the key that signed 'filename' from 'keyserver' and
1206 add it to 'keyring'. Returns nothing on success, or an error message
1210 # Defaults for keyserver and keyring
1212 keyserver = Cnf["Dinstall::KeyServer"]
1214 keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
1216 # Ensure the filename contains no shell meta-characters or other badness
1217 if not re_taint_free.match(filename):
1218 return "%s: tainted filename" % (filename)
1220 # Invoke gpgv on the file
# Run gpgv against an empty (/dev/null) keyring: we only want the
# NO_PUBKEY status line to learn the fingerprint, not a verification.
1221 status_read, status_write = os.pipe()
1222 cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1223 (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1225 # Process the status-fd output
1226 (keywords, internal_error) = process_gpgv_output(status)
1228 return internal_error
1230 if not keywords.has_key("NO_PUBKEY"):
1231 return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1233 fingerprint = keywords["NO_PUBKEY"][0]
1234 # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
1235 # it'll try to create a lockfile in /dev. A better solution might
1236 # be a tempfile or something.
1237 cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1238 % (Cnf["Dinstall::SigningKeyring"])
1239 cmd += " --keyring %s --keyserver %s --recv-key %s" \
1240 % (keyring, keyserver, fingerprint)
1241 (result, output) = commands.getstatusoutput(cmd)
1243 return "'%s' failed with exit code %s" % (cmd, result)
1247 ################################################################################
1249 def gpg_keyring_args(keyrings=None):
# Build a "--keyring X --keyring Y" argument string; defaults to the
# keyrings configured under Dinstall::GPGKeyring.
1251 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1253 return " ".join(["--keyring %s" % x for x in keyrings])
1255 ################################################################################
def check_signature (sig_filename, reject, data_filename="", keyrings=None, autofetch=None):
    """
    Check the signature of a file and return the fingerprint if the
    signature is valid or 'None' if it's not. The first argument is the
    filename whose signature should be checked. The second argument is a
    reject function and is called when an error is found. The reject()
    function must allow for two arguments: the first is the error message,
    the second is an optional prefix string. It's possible for reject()
    to be called more than once during an invocation of check_signature().
    The third argument is optional and is the name of the files the
    detached signature applies to. The fourth argument is optional and is
    a *list* of keyrings to use. 'autofetch' can either be None, True or
    False. If None, the default behaviour specified in the config will be
    """
    # NOTE(review): this excerpt elides a number of original lines (guards,
    # early returns, "else:" headers and local assignments).  Comments below
    # flag each spot where a visible statement clearly depends on elided
    # context -- confirm against the full file before relying on them.

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(sig_filename):
        reject("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))

    # The detached-data filename is interpolated into a shell command below,
    # so it must be vetted for shell metacharacters as well.
    if data_filename and not re_taint_free.match(data_filename):
        reject("!!WARNING!! tainted data filename: '%s'." % (data_filename))

    # NOTE(review): presumably guarded by "if not keyrings:" in the full
    # file -- otherwise a caller-supplied keyring list would be ignored here.
    keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    # Autofetch the signing key if that's enabled
    if autofetch == None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    # NOTE(review): likely guarded by "if autofetch:" with handling of a
    # non-empty error_msg (reject + bail) in the full file.
    error_msg = retrieve_key(sig_filename)

    # Build the command line
    # The write end of the pipe is handed to gpgv as its --status-fd so we
    # can parse the machine-readable status output.
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    # NOTE(review): these three rejects are presumably inside an
    # "if internal_error:" guard in the full file.
    reject("internal error while performing signature check on %s." % (sig_filename))
    reject(internal_error, "")
    reject("Please report the above errors to the Archive maintainers by replying to this mail.", "")

    # Now check for obviously bad things in the processed output
    if keywords.has_key("KEYREVOKED"):
        reject("The key used to sign %s has been revoked." % (sig_filename))
    if keywords.has_key("BADSIG"):
        reject("bad signature on %s." % (sig_filename))
    # ERRSIG together with NO_PUBKEY just means the key is missing; a bare
    # ERRSIG means gpgv failed for some other reason.
    if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
        reject("failed to check signature on %s." % (sig_filename))
    if keywords.has_key("NO_PUBKEY"):
        args = keywords["NO_PUBKEY"]
        # NOTE(review): 'key' is extracted from args in an elided line.
        reject("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if keywords.has_key("BADARMOR"):
        reject("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if keywords.has_key("NODATA"):
        reject("no signature found in %s." % (sig_filename))
    if keywords.has_key("EXPKEYSIG"):
        args = keywords["EXPKEYSIG"]
        # NOTE(review): 'key' is extracted from args in an elided line.
        reject("Signature made by expired key 0x%s" % (key))
    # An expired key only matters when the signature is not otherwise good.
    if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
        args = keywords["KEYEXPIRED"]
        # NOTE(review): 'timestamp' is extracted from args in an elided line,
        # and an "else:" between the next two assignments appears elided: a
        # timestamp without "T" is seconds-since-epoch and gets formatted;
        # otherwise it is an ISO8601-style string used verbatim.
        if timestamp.count("T") == 0:
            expiredate = time.strftime("%Y-%m-%d", time.gmtime(timestamp))
            expiredate = timestamp
        reject("The key used to sign %s has expired on %s" % (sig_filename, expiredate))

    # Next check gpgv exited with a zero return code
    # NOTE(review): presumably guarded by "if exit_status:" in the full file,
    # with the two dump rejects conditional on non-empty status/output.
    reject("gpgv failed while checking %s." % (sig_filename))
    reject(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
    reject(prefix_multi_line_string(output, " [GPG output:] "), "")

    # Sanity check the good stuff we expect
    if not keywords.has_key("VALIDSIG"):
        reject("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
    args = keywords["VALIDSIG"]
    # NOTE(review): presumably guarded by a length check on args.
    reject("internal error while checking signature on %s." % (sig_filename))
    # The first VALIDSIG argument is the signing key's fingerprint.
    fingerprint = args[0]
    if not keywords.has_key("GOODSIG"):
        reject("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if not keywords.has_key("SIG_ID"):
        reject("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

    # Finally ensure there's not something we don't recognise
    known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
                          NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")
    for keyword in keywords.keys():
        if not known_keywords.has_key(keyword):
            reject("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
1398 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """
    Retrieve email addresses from gpg key uids for a given fingerprint.

    @type fingerprint: string
    @param fingerprint: fingerprint of the key to look up

    @rtype: set
    @return: set of email addresses found in the key's uids; results are
        memoized in key_uid_email_cache
    """
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses is not None:
        # Cache hit: no need to shell out to gpg again.
        return addresses
    addresses = set()
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
          % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    # Only trust the output if gpg exited successfully.
    if result == 0:
        for l in output.split('\n'):
            m = re_gpg_uid.match(l)
            if m:
                addresses.add(m.group(1))
    key_uid_email_cache[fingerprint] = addresses
    return addresses
1417 ################################################################################
# Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603

def wrap(paragraph, max_length, prefix=""):
    """
    Greedily word-wrap 'paragraph' so each line is at most 'max_length'
    characters.  Every line after the first is preceded by 'prefix'.
    Words longer than 'max_length' are placed on a line of their own
    rather than being split.

    @type paragraph: string
    @param paragraph: text to wrap (split on whitespace)
    @type max_length: int
    @param max_length: maximum line length, excluding the prefix
    @type prefix: string
    @param prefix: string prepended to each continuation line

    @rtype: string
    @return: the wrapped text, with no trailing newline
    """
    lines = []
    current = ""
    for word in paragraph.split():
        if len(word) > max_length:
            # Oversized word: flush any pending line, then emit the word
            # on a line of its own.
            if current:
                lines.append(current)
                current = ""
            lines.append(word)
        elif not current:
            current = word
        elif len(current) + 1 + len(word) > max_length:
            # Word doesn't fit on the current line; start a new one.
            lines.append(current)
            current = word
        else:
            current += ' ' + word
    if current or lines:
        lines.append(current)
    return ('\n' + prefix).join(lines)
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
    Returns the fixed-up 'src'.
    """
    # Strip the archive root (first occurrence only) from both paths.
    stripped_src = src.replace(root, '', 1)
    stripped_dest = dest.replace(root, '', 1)
    # One "../" per directory component of the root-relative destination
    # directory climbs back up to the root before descending to src.
    depth = len(os.path.dirname(stripped_dest).split('/'))
    return '../' * depth + stripped_src
1463 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.

    If 'directory' is non-null, the file is pre-created inside it.
    If 'prefix' is non-null, the filename starts with it (default "dak").
    If 'suffix' is non-null, the filename ends with it.

    @rtype: pair
    @return: (file descriptor, filename), as returned by tempfile.mkstemp
    """
    # Keyword form of mkstemp(suffix, prefix, dir) for readability.
    return tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=directory)
1477 ################################################################################
def is_email_alias(email):
    """ checks if the user part of the email is listed in the alias file """
    global alias_cache
    # Populate the module-level cache on first use; without the 'global'
    # declaration the assignment would only create a local and the cache
    # would be rebuilt (and leaked) on every call.
    if alias_cache is None:
        alias_cache = set()
        aliasfn = which_alias_file()
        if aliasfn:
            # Alias-file lines look like "name: target"; only the name
            # (the local user part) matters here.
            f = open(aliasfn)
            try:
                for l in f:
                    alias_cache.add(l.split(':')[0])
            finally:
                # Always release the file handle, even on a malformed file.
                f.close()
    uid = email.split('@')[0]
    return uid in alias_cache
1491 ################################################################################
# Module-import-time bootstrap of the dak configuration: start from the
# compiled-in default config file and, when a different config file is
# selected (see which_conf_file(), defined elsewhere in this file), layer
# it on top of the same Cnf object so host-specific settings override the
# defaults.
Cnf = apt_pkg.newConfiguration()
apt_pkg.ReadConfigFileISC(Cnf,default_config)

# NOTE(review): this excerpt elides the lines immediately above this
# block (likely including apt_pkg.init()) -- confirm against the full file.
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1501 ################################################################################