2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
43 import email as modemail
44 from dak_exceptions import *
45 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
46 re_multi_line_field, re_srchasver, re_verwithext, \
47 re_parse_maintainer, re_taint_free, re_gpg_uid, re_re_mark
49 ################################################################################
# Host-independent defaults; overridden per host via which_conf_file() /
# which_apt_conf_file() below.
default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used
alias_cache = None #: Cache for email alias checks
key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
# Each entry is (hashname, hashing function, earliest .changes Format:
# version that carries this checksum field).
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
                ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
61 ################################################################################
64 """ Escape html chars """
65 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
67 ################################################################################
def open_file(filename, mode='r'):
    """
    Open C{file}, return fileobject.

    @type filename: string
    @param filename: path/filename to open

    @type mode: string
    @param mode: open mode

    @rtype: fileobject
    @return: open fileobject

    @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
    """
    f = open(filename, mode)
    # Error path: open() failures are surfaced as dak's own exception so
    # callers can catch one type regardless of the underlying IOError.
    raise CantOpenError, filename
91 ################################################################################
def our_raw_input(prompt=""):
    """raw_input()-style prompt helper; reports user EOF (^D) on stderr."""
    # Write the prompt ourselves so it is emitted on stdout explicitly.
    sys.stdout.write(prompt)
    sys.stderr.write("\nUser interrupt (^D).\n")
104 ################################################################################
def extract_component_from_section(section):
    """Split a Debian section string into a (section, component) pair."""
    # An embedded '/' means the component is given explicitly,
    # e.g. "contrib/net" -> component "contrib".
    if section.find('/') != -1:
        component = section.split('/')[0]
    # Expand default component via a Component::<section> config mapping.
    if Cnf.has_key("Component::%s" % section):
        return (section, component)
121 ################################################################################
def parse_deb822(contents, signing_rules=0):
    """
    Parse a deb822-style (RFC822-like) blob into a field -> value dict.

    @param contents: full text of the control data (possibly PGP-clearsigned)
    @param signing_rules: -1 no signature required, 0 signature required,
        1 strict dpkg-source-style format checking (see parse_changes).
    @raise ParseChangesError: on malformed input
    @raise InvalidDscError: on strict-mode signature framing violations
    """
    # Split the lines in the input, keeping the linebreaks.
    lines = contents.splitlines(True)
    raise ParseChangesError, "[Empty changes file]"
    # Reindex by line number so we can easily verify the format of
    # any errors we report.
    indexed_lines[index] = line[:-1]
    num_of_lines = len(indexed_lines.keys())
    while index < num_of_lines:
        line = indexed_lines[index]
        if signing_rules == 1:
            # Strict mode: a blank line must be followed directly by the
            # PGP signature block; anything else is a framing error.
            if index > num_of_lines:
                raise InvalidDscError, index
            line = indexed_lines[index]
            if not line.startswith("-----BEGIN PGP SIGNATURE"):
                raise InvalidDscError, index
        if line.startswith("-----BEGIN PGP SIGNATURE"):
        if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
            if signing_rules == 1:
                # Skip the PGP header data up to the blank line that
                # terminates it.
                while index < num_of_lines and line != "":
                    line = indexed_lines[index]
        # If we're not inside the signed data, don't process anything
        if signing_rules >= 0 and not inside_signature:
        # "Field: value" on a single line.
        slf = re_single_line_field.match(line)
        field = slf.groups()[0].lower()
        changes[field] = slf.groups()[1]
        changes[field] += '\n'
        # Continuation line of a multi-line field.
        mlf = re_multi_line_field.match(line)
        raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
        if first == 1 and changes[field] != "":
            changes[field] += '\n'
        changes[field] += mlf.groups()[0] + '\n'
    if signing_rules == 1 and inside_signature:
        # Strict mode: input ended while still inside the signed section.
        raise InvalidDscError, index
    # Keep the raw text around for later re-parsing (see ensure_hashes).
    changes["filecontents"] = "".join(lines)
    if changes.has_key("source"):
        # Strip the source version in brackets from the source field,
        # put it in the "source-version" field instead.
        srcver = re_srchasver.search(changes["source"])
        changes["source"] = srcver.group(1)
        changes["source-version"] = srcver.group(2)
    raise ParseChangesError, error
211 ################################################################################
def parse_changes(filename, signing_rules=0):
    """
    Parses a changes file and returns a dictionary where each field is a
    key.  The mandatory first argument is the filename of the .changes
    file.

    signing_rules is an optional argument:

      - If signing_rules == -1, no signature is required.
      - If signing_rules == 0 (the default), a signature is required.
      - If signing_rules == 1, it turns on the same strict format checking
        as dpkg-source.

    The rules for (signing_rules == 1)-mode are:

      - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
        followed by any PGP header data and must end with a blank line.

      - The data section must end with a blank line and must be followed by
        "-----BEGIN PGP SIGNATURE-----".
    """
    # Read the whole file and delegate the actual parsing to parse_deb822.
    changes_in = open_file(filename)
    content = changes_in.read()
    return parse_deb822(content, signing_rules)
240 ################################################################################
def hash_key(hashname):
    """Return the per-file dict key under which this hash is stored."""
    # e.g. "sha1" -> "sha1sum", "md5" -> "md5sum"
    return hashname + 'sum'
245 ################################################################################
def create_hash(where, files, hashname, hashfunc):
    """
    create_hash extends the passed files dict with the given hash by
    iterating over all files on disk and passing them to the hashing
    function given.
    """
    for f in files.keys():
            file_handle = open_file(f)
        except CantOpenError:
            # Unopenable file -> collect a rejection message, don't abort.
            rejmsg.append("Could not open file %s for checksumming" % (f))
        # Store under e.g. "sha1sum" via hash_key().
        files[f][hash_key(hashname)] = hashfunc(file_handle)
266 ################################################################################
def check_hash(where, files, hashname, hashfunc):
    """
    check_hash checks the given hash in the files dict against the actual
    files on disk.  The hash values need to be present consistently in
    all file entries.  It does not modify its input in any way.
    """
    for f in files.keys():
            file_handle = open_file(f)
            # Check for the hash entry, to not trigger a KeyError.
            if not files[f].has_key(hash_key(hashname)):
                rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
            # Actually check the hash for correctness.
            if hashfunc(file_handle) != files[f][hash_key(hashname)]:
                rejmsg.append("%s: %s check failed in %s" % (f, hashname,
        except CantOpenError:
            # TODO: This happens when the file is in the pool.
            # warn("Cannot open file %s" % f)
301 ################################################################################
def check_size(where, files):
    """
    check_size checks the file sizes in the passed files dict against the
    sizes of the actual files on disk.
    """
    for f in files.keys():
        # TODO: This happens when the file is in the pool.
        actual_size = entry[stat.ST_SIZE]
        # Sizes in the dict are stored as strings; compare numerically.
        size = int(files[f]["size"])
        if size != actual_size:
            rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
                          % (f, actual_size, size, where))
326 ################################################################################
def check_hash_fields(what, manifest):
    """
    check_hash_fields ensures that there are no checksum fields in the
    given dict that we do not know about.
    """
    # Supported hash names come from the module-level known_hashes table.
    hashes = map(lambda x: x[0], known_hashes)
    for field in manifest:
        if field.startswith("checksums-"):
            hashname = field.split("-",1)[1]
            if hashname not in hashes:
                rejmsg.append("Unsupported checksum field for %s "\
                    "in %s" % (hashname, what))
344 ################################################################################
def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
    # Only .changes files of a new-enough Format: carry this checksum field
    # (see known_hashes for the per-hash version thresholds).
    if format >= version:
        # The version should contain the specified hash.
        # Import hashes from the changes
        rejmsg = parse_checksums(".changes", files, changes, hashname)
        # We need to calculate the hash because it can't possibly
        # appear in a .changes file of an older format.
        # NOTE(review): `func` is bound above (lines not visible here);
        # its signature matches check_hash/create_hash -- confirm which.
        return func(".changes", files, hashname, hashfunc)
# We could add the orig which might be in the pool to the files dict to
# access the checksums easily.

def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
    """
    ensure_dsc_hashes' task is to ensure that each and every *present* hash
    in the dsc is correct, i.e. identical to the changes file and if necessary
    the pool. The latter task is delegated to check_hash.
    """
    # Nothing to verify if the .dsc doesn't carry this checksum field.
    if not dsc.has_key('Checksums-%s' % (hashname,)):
    # Import hashes from the dsc
    parse_checksums(".dsc", dsc_files, dsc, hashname)
    rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
380 ################################################################################
def ensure_hashes(changes, dsc, files, dsc_files):
    """Validate all known checksum fields in a .changes/.dsc pair."""
    # Make sure we recognise the format of the Files: field in the .changes;
    # turn "X.Y" into an (X, Y) int tuple for comparisons below.
    format = changes.get("format", "0.0").split(".", 1)
    format = int(format[0]), int(format[1])
    format = int(float(format[0])), 0

    # We need to deal with the original changes blob, as the fields we need
    # might not be in the changes dict serialised into the .dak anymore.
    orig_changes = parse_deb822(changes['filecontents'])

    # Copy the checksums over to the current changes dict. This will keep
    # the existing modifications to it intact.
    for field in orig_changes:
        if field.startswith('checksums-'):
            changes[field] = orig_changes[field]

    # Check for unsupported hashes
    rejmsg.extend(check_hash_fields(".changes", changes))
    rejmsg.extend(check_hash_fields(".dsc", dsc))

    # We have to calculate the hash if we have an earlier changes version than
    # the hash appears in rather than require it exist in the changes file
    for hashname, hashfunc, version in known_hashes:
        rejmsg.extend(_ensure_changes_hash(changes, format, version, files,
        # Source uploads additionally need their .dsc hashes verified.
        if "source" in changes["architecture"]:
            rejmsg.extend(_ensure_dsc_hash(dsc, dsc_files, hashname,
def parse_checksums(where, files, manifest, hashname):
    """Import 'checksums-<hashname>' entries from a manifest into the files dict."""
    field = 'checksums-%s' % hashname
    if not field in manifest:
    # Each line is "<checksum> <size> <filename>".
    for line in manifest[field].split('\n'):
        checksum, size, checkfile = line.strip().split(' ')
        if not files.has_key(checkfile):
            # TODO: check for the file's entry in the original files dict, not
            # the one modified by (auto)byhand and other weird stuff
            # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
            # (file, hashname, where))
        # NOTE(review): `size` here is a str from split(); this comparison
        # assumes files[checkfile]["size"] is also stored as a str -- confirm.
        if not files[checkfile]["size"] == size:
            rejmsg.append("%s: size differs for files and checksums-%s entry "\
                "in %s" % (checkfile, hashname, where))
        files[checkfile][hash_key(hashname)] = checksum
    # Every file must have picked up an entry for this hash.
    for f in files.keys():
        if not files[f].has_key(hash_key(hashname)):
            # NOTE(review): this reports `checkfile` (last line parsed above)
            # rather than the loop variable `f` -- looks like a bug; confirm
            # before changing the message.
            rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
443 ################################################################################
# Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl

def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
    """Parse a Files:/Checksums-*: field into a per-filename dict of attributes."""
    # Make sure we have a Files: field to parse...
    if not changes.has_key(field):
        raise NoFilesFieldError

    # Make sure we recognise the format of the Files: field
    format = re_verwithext.search(changes.get("format", "0.0"))
    raise UnknownFormatError, "%s" % (changes.get("format","0.0"))

    format = format.groups()
    if format[1] == None:
        format = int(float(format[0])), 0, format[2]
        format = int(format[0]), int(format[1]), format[2]
    if format[2] == None:

    # format = (1,0) are the only formats we currently accept,
    # format = (0,0) are missing format headers of which we still
    # have some in the archive.
    if format != (1,0) and format != (0,0):
        raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
    if (format < (1,5) or format > (1,8)):
        raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
    # Checksums-* fields only exist from format 1.8 on.
    if field != "files" and format < (1,8):
        raise UnknownFormatError, "%s" % (changes.get("format","0.0"))

    # Only a .changes "Files:" field carries section/priority columns
    # (hence the 5-tuple vs 3-tuple unpacking below).
    includes_section = (not is_a_dsc) and field == "files"

    # Parse each entry/line:
    for i in changes[field].split('\n'):
        section = priority = ""
        (md5, size, section, priority, name) = s
        (md5, size, name) = s
        raise ParseChangesError, i
        (section, component) = extract_component_from_section(section)
        files[name] = Dict(size=size, section=section,
                           priority=priority, component=component)
        files[name][hashname] = md5
508 ################################################################################
def force_to_utf8(s):
    """
    Forces a string to UTF-8.  If the string isn't already UTF-8,
    it's assumed to be ISO-8859-1.
    """
    # Fallback path: decode as latin-1 (never fails) and re-encode.
    latin1_s = unicode(s,'iso8859-1')
    return latin1_s.encode('utf-8')
def rfc2047_encode(s):
    """
    Encodes a (header) string per RFC2047 if necessary.  If the
    string is neither ASCII nor UTF-8, it's assumed to be ISO-8859-1.
    """
    # Try plain ASCII first (no encoding needed if this succeeds)...
    codecs.lookup('ascii')[1](s)
    # ...then UTF-8, then fall back to ISO-8859-1. 998 is the maximum
    # header line length passed to email.Header.
    codecs.lookup('utf-8')[1](s)
    h = email.Header.Header(s, 'utf-8', 998)
    h = email.Header.Header(s, 'iso-8859-1', 998)
540 ################################################################################
542 # <Culus> 'The standard sucks, but my tool is supposed to interoperate
543 # with it. I know - I'll fix the suckage and make things
def fix_maintainer (maintainer):
    """
    Parses a Maintainer or Changed-By field and returns:
      1. an RFC822 compatible version,
      2. an RFC2047 compatible version,
      3. the name, and
      4. the email address.

    The name is forced to UTF-8 for both 1. and 3..  If the name field
    contains '.' or ',' (as allowed by Debian policy), 1. and 2. are
    switched to 'email (name)' format.

    @raise ParseMaintError: if the field cannot be parsed or the email
        part contains no '@' (buildd_* addresses excepted).
    """
    maintainer = maintainer.strip()
    return ('', '', '', '')

    # Three shapes: bare email, "<email>", or "Name <email>".
    if maintainer.find("<") == -1:
    elif (maintainer[0] == "<" and maintainer[-1:] == ">"):
        email = maintainer[1:-1]
        m = re_parse_maintainer.match(maintainer)
        raise ParseMaintError, "Doesn't parse as a valid Maintainer field."

    # Get an RFC2047 compliant version of the name
    rfc2047_name = rfc2047_encode(name)

    # Force the name to be UTF-8
    name = force_to_utf8(name)

    # Names containing '.' or ',' use the 'email (name)' form (RFC822
    # would otherwise require quoting).
    if name.find(',') != -1 or name.find('.') != -1:
        rfc822_maint = "%s (%s)" % (email, name)
        rfc2047_maint = "%s (%s)" % (email, rfc2047_name)
        rfc822_maint = "%s <%s>" % (name, email)
        rfc2047_maint = "%s <%s>" % (rfc2047_name, email)

    # buildd_* pseudo-addresses are allowed to lack an '@'.
    if email.find("@") == -1 and email.find("buildd_") != 0:
        raise ParseMaintError, "No @ found in email address part."

    return (rfc822_maint, rfc2047_maint, name, email)
594 ################################################################################
def send_mail (message, filename=""):
    """sendmail wrapper, takes _either_ a message string or a file as arguments"""
    # If we've been passed a string dump it into a temporary file
    (fd, filename) = tempfile.mkstemp()
    os.write (fd, message)

    # Optionally filter recipients against Dinstall::MailWhiteList.
    if Cnf.has_key("Dinstall::MailWhiteList") and \
       Cnf["Dinstall::MailWhiteList"] != "":
        message_in = open_file(filename)
        message_raw = modemail.message_from_file(message_in)

        # Build the whitelist: lines marked with re_re_mark are regexes,
        # all other lines match literally.
        whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
        for line in whitelist_in:
            if re_re_mark.match(line):
                whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
                whitelist.append(re.compile(re.escape(line.strip())))

        # Walk over the recipient headers and drop non-whitelisted addresses.
        fields = ["To", "Bcc", "Cc"]
            value = message_raw.get(field, None)
            for item in value.split(","):
                (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
                if not mail_whitelisted:
                    print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])

            # Doesn't have any mail in whitelist so remove the header
            del message_raw[field]
            message_raw.replace_header(field, string.join(match, ", "))

        # Change message fields in order if we don't have a To header
        if not message_raw.has_key("To"):
            if message_raw.has_key(field):
                message_raw[fields[-1]] = message_raw[field]
                del message_raw[field]

        # Clean up any temporary files
        # and return, as we removed all recipients.
        os.unlink (filename);

        # Rewrite the (filtered) message back into the temp file.
        fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
        os.write (fd, message_raw.as_string(True));

    # Invoke sendmail on the (possibly rewritten) message file.
    (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
    raise SendmailFailedError, output

    # Clean up any temporary files
def poolify (source, component):
    """Return the pool subdirectory path for a source package."""
    # Library packages pool under their first four characters ("libX"),
    # everything else under the first character.
    if source[:3] == "lib":
        return component + source[:4] + '/' + source + '/'
    return component + source[:1] + '/' + source + '/'
685 ################################################################################
def move (src, dest, overwrite = 0, perms = 0664):
    """Move src to dest, creating the destination directory if necessary."""
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        # Create intermediate dirs group-writable/setgid (02775); reset the
        # umask so the mode is applied exactly.
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Moving %s to %s..." % (src, dest)
    # Moving into a directory: append the source's basename.
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
        fubar("Can't move %s to %s - file already exists." % (src, dest))
        if not os.access(dest, os.W_OK):
            fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
    # copy2 preserves mtime/permission bits; perms is then applied explicitly.
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
def copy (src, dest, overwrite = 0, perms = 0664):
    """Copy src to dest, creating the destination directory if necessary.

    @raise FileExistsError: dest exists and overwrite not forced
    @raise CantOverwriteError: dest exists but is not writable
    """
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        # Create intermediate dirs group-writable/setgid (02775); reset the
        # umask so the mode is applied exactly.
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Copying %s to %s..." % (src, dest)
    # Copying into a directory: append the source's basename.
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
        raise FileExistsError
        if not os.access(dest, os.W_OK):
            raise CantOverwriteError
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
732 ################################################################################
735 res = socket.gethostbyaddr(socket.gethostname())
736 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
737 if database_hostname:
738 return database_hostname
def which_conf_file ():
    """Return the host-specific dak config path, falling back to the default."""
    # Config::<fqdn>::DakConfig overrides default_config per host.
    res = socket.gethostbyaddr(socket.gethostname())
    if Cnf.get("Config::" + res[0] + "::DakConfig"):
        return Cnf["Config::" + res[0] + "::DakConfig"]
    return default_config
def which_apt_conf_file ():
    """Return the host-specific apt config path, falling back to the default."""
    # Config::<fqdn>::AptConfig overrides default_apt_config per host.
    res = socket.gethostbyaddr(socket.gethostname())
    if Cnf.get("Config::" + res[0] + "::AptConfig"):
        return Cnf["Config::" + res[0] + "::AptConfig"]
    return default_apt_config
def which_alias_file():
    """Return the per-host forward-alias file path, if one exists."""
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
    if os.path.exists(aliasfn):
764 ################################################################################
766 # Escape characters which have meaning to SQL's regex comparison operator ('~')
767 # (woefully incomplete)
770 s = s.replace('+', '\\\\+')
771 s = s.replace('.', '\\\\.')
774 ################################################################################
def TemplateSubst(map, filename):
    """ Perform a substitution of template """
    templatefile = open_file(filename)
    template = templatefile.read()
    # Replace each occurrence of a key of `map` with its value.
    template = template.replace(x,map[x])
785 ################################################################################
def fubar(msg, exit_code=1):
    """Report a fatal error on stderr; exit_code is the intended exit status."""
    sys.stderr.write("E: %s\n" % (msg))
792 sys.stderr.write("W: %s\n" % (msg))
794 ################################################################################
796 # Returns the user name with a laughable attempt at rfc822 conformancy
797 # (read: removing stray periods).
799 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
801 ################################################################################
811 return ("%d%s" % (c, t))
813 ################################################################################
def cc_fix_changes (changes):
    """Rewrite changes['architecture'] from a string into a {arch: 1} dict."""
    o = changes.get("architecture", "")
    del changes["architecture"]
    changes["architecture"] = {}
    changes["architecture"][j] = 1
def changes_compare (a, b):
    """ Sort by source name, source version, 'have source', and then by filename """
    a_changes = parse_changes(a)
    b_changes = parse_changes(b)

    # Normalise the architecture fields into dicts for the checks below.
    cc_fix_changes (a_changes)
    cc_fix_changes (b_changes)

    # Sort by source name
    a_source = a_changes.get("source")
    b_source = b_changes.get("source")
    q = cmp (a_source, b_source)

    # Sort by source version
    a_version = a_changes.get("version", "0")
    b_version = b_changes.get("version", "0")
    q = apt_pkg.VersionCompare(a_version, b_version)

    # Sort by 'have source'
    a_has_source = a_changes["architecture"].get("source")
    b_has_source = b_changes["architecture"].get("source")
    if a_has_source and not b_has_source:
    elif b_has_source and not a_has_source:

    # Fall back to sort by filename
863 ################################################################################
def find_next_free (dest, too_many=100):
    """Find an unused filename by appending '.<n>' to dest.

    @raise NoFreeFilenameError: after too_many attempts
    """
    while os.path.exists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
    if extra >= too_many:
        raise NoFreeFilenameError
875 ################################################################################
def result_join (original, sep = '\t'):
    """Join a sequence with sep, rendering None entries as empty strings."""
    for i in xrange(len(original)):
        if original[i] == None:
            resultlist.append("")
            resultlist.append(original[i])
    return sep.join(resultlist)
886 ################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    """Prefix every line of str with prefix; blank lines only when requested."""
    for line in str.split('\n'):
        if line or include_blank_lines:
            out += "%s%s\n" % (prefix, line)
    # Strip trailing new line
def validate_changes_file_arg(filename, require_changes=1):
    """
    'filename' is either a .changes or .dak file.  If 'filename' is a
    .dak file, it's changed to be the corresponding .changes file.  The
    function then checks if the .changes file a) exists and b) is
    readable and returns the .changes filename if so.  If there's a
    problem, the next action depends on the option 'require_changes'
    argument:

      - If 'require_changes' == -1, errors are ignored and the .changes
        filename is returned.
      - If 'require_changes' == 0, a warning is given and 'None' is returned.
      - If 'require_changes' == 1, a fatal error is raised.

    """
    orig_filename = filename
    # Map foo.dak -> foo.changes.
    if filename.endswith(".dak"):
        filename = filename[:-4]+".changes"

    if not filename.endswith(".changes"):
        error = "invalid file type; not a changes file"
    if not os.access(filename,os.R_OK):
        if os.path.exists(filename):
            error = "permission denied"
            error = "file not found"

    # Dispatch on require_changes as documented above.
    if require_changes == 1:
        fubar("%s: %s." % (orig_filename, error))
    elif require_changes == 0:
        warn("Skipping %s - %s" % (orig_filename, error))
    else: # We only care about the .dak file
942 ################################################################################
945 return (arch != "source" and arch != "all")
947 ################################################################################
def join_with_commas_and(list):
    """Render a list as English prose, e.g. ['a','b','c'] -> 'a, b and c'."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    head = ", ".join(list[:-1])
    return head + " and " + list[-1]
954 ################################################################################
959 (pkg, version, constraint) = atom
961 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
964 pp_deps.append(pp_dep)
965 return " |".join(pp_deps)
967 ################################################################################
972 ################################################################################
def parse_args(Options):
    """ Handle -a, -c and -s arguments; returns them as SQL constraints """
    # Process suite: translate names to ids, warning on unknown ones.
    for suite in split_args(Options["Suite"]):
        suite_id = database.get_suite_id(suite)
        warn("suite '%s' not recognised." % (suite))
        suite_ids_list.append(suite_id)
    con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
    fubar("No valid suite given.")

    # Process component, same pattern as suites above.
    if Options["Component"]:
        component_ids_list = []
        for component in split_args(Options["Component"]):
            component_id = database.get_component_id(component)
            if component_id == -1:
                warn("component '%s' not recognised." % (component))
                component_ids_list.append(component_id)
        if component_ids_list:
            con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
            fubar("No valid component given.")

    # Process architecture
    con_architectures = ""
    if Options["Architecture"]:
        for architecture in split_args(Options["Architecture"]):
            # "source" is not a real architecture; it toggles check_source.
            if architecture == "source":
            architecture_id = database.get_architecture_id(architecture)
            if architecture_id == -1:
                warn("architecture '%s' not recognised." % (architecture))
                arch_ids_list.append(architecture_id)
        con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
        if not check_source:
            fubar("No valid architecture given.")

    return (con_suites, con_architectures, con_components, check_source)
1032 ################################################################################
1034 # Inspired(tm) by Bryn Keller's print_exc_plus (See
1035 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
1038 tb = sys.exc_info()[2]
1045 frame = frame.f_back
1047 traceback.print_exc()
1049 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
1050 frame.f_code.co_filename,
1052 for key, value in frame.f_locals.items():
1053 print "\t%20s = " % key,
1057 print "<unable to print>"
1059 ################################################################################
1061 def try_with_debug(function):
1069 ################################################################################
def arch_compare_sw (a, b):
    """
    Function for use in sorting lists of architectures.

    Sorts normally except that 'source' dominates all others.
    """
    if a == "source" and b == "source":
1087 ################################################################################
def split_args (s, dwim=1):
    """
    Split command line arguments which can be separated by either commas
    or whitespace.  If dwim is set, it will complain about string ending
    in comma since this usually means someone did 'dak ls -a i386, m68k
    foo' or something and the inevitable confusion resulting from 'm68k'
    being treated as an argument is undesirable.
    """
    # No comma -> whitespace-separated; otherwise comma-separated.
    if s.find(",") == -1:
    if s[-1:] == "," and dwim:
        fubar("split_args: found trailing comma, spurious space maybe?")
1105 ################################################################################
def Dict(**kwargs):
    """Tiny helper: build a dict directly from keyword arguments."""
    return kwargs
1109 ########################################
def gpgv_get_status_output(cmd, status_read, status_write):
    """
    Our very own version of commands.getouputstatus(), hacked to support
    the status_read/status_write file descriptors gpgv uses.
    """
    # Run via the shell so `cmd` may be a full command-line string.
    cmd = ['/bin/sh', '-c', cmd]
    p2cread, p2cwrite = os.pipe()
    c2pread, c2pwrite = os.pipe()
    errout, errin = os.pipe()
    # Close every inherited fd except the status fd before exec'ing
    # (presumably in the forked child -- the fork itself is above).
    for i in range(3, 256):
        if i != status_write:
    os.execvp(cmd[0], cmd)
    os.dup2(c2pread, c2pwrite)
    os.dup2(errout, errin)

    # Multiplex child stdout/stderr and the gpgv status fd.
    output = status = ""
        i, o, e = select.select([c2pwrite, errin, status_read], [], [])
        r = os.read(fd, 8196)
        more_data.append(fd)
        if fd == c2pwrite or fd == errin:
        elif fd == status_read:
            fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))

    # Reap the child and release the status pipe.
    pid, exit_status = os.waitpid(pid, 0)
    os.close(status_write)
    os.close(status_read)

    return output, status, exit_status
1176 ################################################################################
def process_gpgv_output(status):
    """Parse gpgv --status-fd output into ({keyword: args}, internal_error)."""
    # Process the status-fd output
    for line in status.split('\n'):
        split = line.split()
        internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
        # Every status line must start with the "[GNUPG:]" marker.
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
        # These tokens may legitimately appear more than once per run.
        if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
        keywords[keyword] = args
    return (keywords, internal_error)
1203 ################################################################################
def retrieve_key (filename, keyserver=None, keyring=None):
    """
    Retrieve the key that signed 'filename' from 'keyserver' and
    add it to 'keyring'.  Returns nothing on success, or an error message
    string on failure.
    """
    # Defaults for keyserver and keyring
    keyserver = Cnf["Dinstall::KeyServer"]
    keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(filename):
        return "%s: tainted filename" % (filename)

    # Invoke gpgv on the file with an empty keyring so the signing key is
    # guaranteed to be reported as NO_PUBKEY.
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
    (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)
    return internal_error

    if not keywords.has_key("NO_PUBKEY"):
        return "didn't find expected NO_PUBKEY in gpgv status-fd output"

    fingerprint = keywords["NO_PUBKEY"][0]
    # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
    # it'll try to create a lockfile in /dev. A better solution might
    # be a tempfile or something.
    cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
        % (Cnf["Dinstall::SigningKeyring"])
    cmd += " --keyring %s --keyserver %s --recv-key %s" \
        % (keyring, keyserver, fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    return "'%s' failed with exit code %s" % (cmd, result)
1249 ################################################################################
def gpg_keyring_args(keyrings=None):
    """Return a '--keyring <path> ...' argument string for gpg/gpgv."""
    # Default to the keyrings configured under Dinstall::GPGKeyring.
    keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
    return " ".join(["--keyring %s" % x for x in keyrings])
1257 ################################################################################
def check_signature (sig_filename, reject, data_filename="", keyrings=None, autofetch=None):
    """
    Check the signature of a file and return the fingerprint if the
    signature is valid or 'None' if it's not.  The first argument is the
    filename whose signature should be checked.  The second argument is a
    reject function and is called when an error is found.  The reject()
    function must allow for two arguments: the first is the error message,
    the second is an optional prefix string.  It's possible for reject()
    to be called more than once during an invocation of check_signature().
    The third argument is optional and is the name of the files the
    detached signature applies to.  The fourth argument is optional and is
    a *list* of keyrings to use.  'autofetch' can either be None, True or
    False.  If None, the default behaviour specified in the config will be
    used.
    """

    # Ensure the filename contains no shell meta-characters or other badness;
    # both names are later interpolated into a shell command line.
    if not re_taint_free.match(sig_filename):
        reject("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
        return None

    if data_filename and not re_taint_free.match(data_filename):
        reject("!!WARNING!! tainted data filename: '%s'." % (data_filename))
        return None

    if not keyrings:
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    # Autofetch the signing key if that's enabled
    if autofetch == None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    if autofetch:
        error_msg = retrieve_key(sig_filename)
        if error_msg:
            reject(error_msg)
            return None

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    if internal_error:
        reject("internal error while performing signature check on %s." % (sig_filename))
        reject(internal_error, "")
        reject("Please report the above errors to the Archive maintainers by replying to this mail.", "")
        return None

    bad = ""

    # Now check for obviously bad things in the processed output
    if keywords.has_key("KEYREVOKED"):
        reject("The key used to sign %s has been revoked." % (sig_filename))
        bad = 1
    if keywords.has_key("BADSIG"):
        reject("bad signature on %s." % (sig_filename))
        bad = 1
    if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
        reject("failed to check signature on %s." % (sig_filename))
        bad = 1
    if keywords.has_key("NO_PUBKEY"):
        args = keywords["NO_PUBKEY"]
        # Initialise 'key' so a malformed (argument-less) token can't NameError.
        key = "UNKNOWN"
        if len(args) >= 1:
            key = args[0]
        reject("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
        bad = 1
    if keywords.has_key("BADARMOR"):
        reject("ASCII armour of signature was corrupt in %s." % (sig_filename))
        bad = 1
    if keywords.has_key("NODATA"):
        reject("no signature found in %s." % (sig_filename))
        bad = 1
    if keywords.has_key("EXPKEYSIG"):
        args = keywords["EXPKEYSIG"]
        key = "UNKNOWN"
        if len(args) >= 1:
            key = args[0]
        reject("Signature made by expired key 0x%s" % (key))
        bad = 1
    if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
        args = keywords["KEYEXPIRED"]
        expiredate = ""
        if len(args) >= 1:
            timestamp = args[0]
            if timestamp.count("T") == 0:
                # Plain epoch seconds; float() because the status-fd token is a
                # string and time.gmtime() requires a number.
                expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
            else:
                # Already an ISO-8601-ish timestamp; report it verbatim.
                expiredate = timestamp
        reject("The key used to sign %s has expired on %s" % (sig_filename, expiredate))
        bad = 1

    if bad:
        return None

    # Next check gpgv exited with a zero return code
    if exit_status:
        reject("gpgv failed while checking %s." % (sig_filename))
        if status.strip():
            reject(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
        else:
            reject(prefix_multi_line_string(output, " [GPG output:] "), "")
        return None

    # Sanity check the good stuff we expect
    if not keywords.has_key("VALIDSIG"):
        reject("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
        bad = 1
    else:
        args = keywords["VALIDSIG"]
        if len(args) < 1:
            reject("internal error while checking signature on %s." % (sig_filename))
            bad = 1
        else:
            fingerprint = args[0]
    if not keywords.has_key("GOODSIG"):
        reject("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
        bad = 1
    if not keywords.has_key("SIG_ID"):
        reject("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))
        bad = 1

    # Finally ensure there's not something we don't recognise
    known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
                          NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")

    for keyword in keywords.keys():
        if not known_keywords.has_key(keyword):
            reject("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
            bad = 1

    if bad:
        return None
    else:
        return fingerprint
1400 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """retrieve email addresses from gpg key uids for a given fingerprint"""
    # Cache hit: return the previously computed address set.
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses != None:
        return addresses
    addresses = set()
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
          % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    # Only parse uids when gpg succeeded; on failure cache an empty set
    # so we don't re-run gpg for a fingerprint that isn't in the keyrings.
    if result == 0:
        for l in output.split('\n'):
            m = re_gpg_uid.match(l)
            if m:
                addresses.add(m.group(1))
    key_uid_email_cache[fingerprint] = addresses
    return addresses
1419 ################################################################################
1421 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """
    Greedily word-wrap 'paragraph' to at most 'max_length' characters per
    line; each continuation line is prefixed with 'prefix'.  Words longer
    than 'max_length' are emitted on a line of their own (unsplit).
    """
    line = ""           # line currently being assembled
    s = ""              # wrapped output accumulated so far
    have_started = 0    # whether any word has been emitted yet
    words = paragraph.split()

    for word in words:
        word_size = len(word)
        if word_size > max_length:
            # Oversized word: flush the pending line, then the word itself.
            if have_started:
                s += line + '\n' + prefix
            s += word + '\n' + prefix
            line = ""
            have_started = 1
        else:
            if have_started:
                # +1 accounts for the joining space.
                new_length = len(line) + word_size + 1
                if new_length > max_length:
                    s += line + '\n' + prefix
                    line = word
                else:
                    line += ' ' + word
            else:
                line = word
                have_started = 1

    if have_started:
        s += line

    return s
1452 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
    Returns fixed 'src'
    """
    # Strip the archive root off both paths (first occurrence only).
    src = src.replace(root, '', 1)
    dest = dest.replace(root, '', 1)
    # One '../' per directory component of the link's location.
    dest = os.path.dirname(dest)
    new_src = '../' * len(dest.split('/'))
    return new_src + src
1465 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.
    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pair (fd, name).
    """
    # tempfile.mkstemp creates the file atomically with mode 0600,
    # so the name cannot be raced by another process.
    return tempfile.mkstemp(suffix, prefix, directory)
1479 ################################################################################
def is_email_alias(email):
    """ checks if the user part of the email is listed in the alias file """
    global alias_cache
    # Lazily populate the module-level cache on first use.
    if alias_cache == None:
        aliasfn = which_alias_file()
        alias_cache = set()
        # No alias file on this host -> cache stays empty (best-effort).
        if aliasfn:
            for l in open(aliasfn):
                # Alias file format: "name: target..."; the local part is
                # everything before the first colon.
                alias_cache.add(l.split(':')[0])
    uid = email.split('@')[0]
    return uid in alias_cache
1493 ################################################################################
# Module-level initialisation: build the global apt_pkg configuration
# object and load the default dak configuration into it.
Cnf = apt_pkg.newConfiguration()
apt_pkg.ReadConfigFileISC(Cnf,default_config)

# Layer the host-specific configuration over the defaults when this host
# uses a config file other than the default one.
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1503 ################################################################################
def generate_contents_information(filename):
    """
    Generate a list of flies contained in a .deb

    @type filename: string
    @param filename: the path to a data.tar.gz or data.tar.bz2

    @return: a list of files in the data.tar.* portion of the .deb
    """
    # List the .deb's ar(1) members so we can locate the data tarball.
    cmd = "ar t %s" % (filename)
    (result, output) = commands.getstatusoutput(cmd)
    # NOTE(review): these reject() calls look like they should be guarded by
    # a check of 'result' (non-zero exit) -- confirm against the full source.
    reject("%s: 'ar t' invocation failed." % (filename))
    reject(utils.prefix_multi_line_string(output, " [ar output:] "), "")

    # Ugh ... this is ugly ... Code ripped from process_unchecked.py
    chunks = output.split('\n')

    # Extract the third ar member (the data tarball) into the CWD.
    cmd = "ar x %s %s" % (filename, chunks[2])
    (result, output) = commands.getstatusoutput(cmd)
    # NOTE(review): same here -- presumably guarded by 'if result != 0:'.
    reject("%s: '%s' invocation failed." % (filename, cmd))
    reject(utils.prefix_multi_line_string(output, " [ar output:] "), "")

    # Got deb tarballs, now lets go through and determine what bits
    # and pieces the deb had ...
    if chunks[2] == "data.tar.gz":
        data = tarfile.open("data.tar.gz", "r:gz")
    elif chunks[2] == "data.tar.bz2":
        data = tarfile.open("data.tar.bz2", "r:bz2")
    # NOTE(review): the two statements below look like an 'else:' fallback
    # for an unrecognised member name -- confirm against the full source.
    os.remove(chunks[2])
    reject("couldn't find data.tar.*")

    # Collect every non-directory member; [2:] strips the leading "./"
    # that tar member names carry.
    for tarinfo in data:
        if not tarinfo.isdir():
            contents.append(tarinfo.name[2:])

    # Clean up the extracted tarball from the CWD.
    # NOTE(review): shutil.rmtree() on a regular file raises OSError; an
    # elided branch presumably distinguishes directory vs file -- confirm.
    if os.path.exists( chunks[2] ):
        shutil.rmtree( chunks[2] )
        os.remove( chunks[2] )
1553 ###############################################################################