2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
42 import email as modemail
43 from dak_exceptions import *
44 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
45 re_multi_line_field, re_srchasver, re_verwithext, \
46 re_parse_maintainer, re_taint_free, re_gpg_uid, re_re_mark
48 ################################################################################
# Host-level configuration defaults and per-process caches.
50 default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
51 default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used
53 alias_cache = None #: Cache for email alias checks
54 key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
56 # (hashname, function, earliest_changes_version)
57 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
58 ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
60 ################################################################################
63 """ Escape html chars """
64 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
66 ################################################################################
68 def open_file(filename, mode='r'):
# Open a file, converting IOError into the project's CantOpenError.
# NOTE(review): the docstring's triple quotes and the try/except
# wrapper are elided in this excerpt; only fragments survive below.
70 Open C{file}, return fileobject.
72 @type filename: string
73 @param filename: path/filename to open
76 @param mode: open mode
79 @return: open fileobject
81 @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
85 f = open(filename, mode)
# Py2 raise syntax; reached from the (elided) `except IOError` handler.
87 raise CantOpenError, filename
90 ################################################################################
92 def our_raw_input(prompt=""):
# raw_input-style prompt wrapper: write the prompt to stdout, read a
# line (elided in this excerpt); on EOF (^D) report a user interrupt.
94 sys.stdout.write(prompt)
100 sys.stderr.write("\nUser interrupt (^D).\n")
103 ################################################################################
105 def extract_component_from_section(section):
# Split a "component/section" style Section value into its parts.
# Lines that set the default component and handle the Cnf expansion
# are elided in this excerpt.
108 if section.find('/') != -1:
109 component = section.split('/')[0]
111 # Expand default component
113 if Cnf.has_key("Component::%s" % section):
118 return (section, component)
120 ################################################################################
122 def parse_deb822(contents, signing_rules=0):
# Parse RFC822-style (deb822) stanza text into a field dict.
# NOTE(review): many lines of this function are elided in this
# excerpt (index bookkeeping, `inside_signature` toggles, the
# `changes`/`error` initialisation); comments only annotate what is
# visible here.
126 # Split the lines in the input, keeping the linebreaks.
127 lines = contents.splitlines(True)
130 raise ParseChangesError, "[Empty changes file]"
132 # Reindex by line number so we can easily verify the format of
138 indexed_lines[index] = line[:-1]
142 num_of_lines = len(indexed_lines.keys())
145 while index < num_of_lines:
147 line = indexed_lines[index]
# signing_rules == 1 enforces the strict PGP framing documented in
# parse_changes().
149 if signing_rules == 1:
151 if index > num_of_lines:
152 raise InvalidDscError, index
153 line = indexed_lines[index]
154 if not line.startswith("-----BEGIN PGP SIGNATURE"):
155 raise InvalidDscError, index
160 if line.startswith("-----BEGIN PGP SIGNATURE"):
162 if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
164 if signing_rules == 1:
# Skip the PGP armor header block up to its terminating blank line.
165 while index < num_of_lines and line != "":
167 line = indexed_lines[index]
169 # If we're not inside the signed data, don't process anything
170 if signing_rules >= 0 and not inside_signature:
# Single-line "Field: value" match.
172 slf = re_single_line_field.match(line)
174 field = slf.groups()[0].lower()
175 changes[field] = slf.groups()[1]
179 changes[field] += '\n'
# Continuation line of a multi-line field.
181 mlf = re_multi_line_field.match(line)
184 raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
185 if first == 1 and changes[field] != "":
186 changes[field] += '\n'
188 changes[field] += mlf.groups()[0] + '\n'
# A strict-mode document that never left the signed section is bad.
192 if signing_rules == 1 and inside_signature:
193 raise InvalidDscError, index
195 changes["filecontents"] = "".join(lines)
197 if changes.has_key("source"):
198 # Strip the source version in brackets from the source field,
199 # put it in the "source-version" field instead.
200 srcver = re_srchasver.search(changes["source"])
202 changes["source"] = srcver.group(1)
203 changes["source-version"] = srcver.group(2)
206 raise ParseChangesError, error
210 ################################################################################
212 def parse_changes(filename, signing_rules=0):
# (Docstring; the enclosing triple quotes are elided in this excerpt.)
214 Parses a changes file and returns a dictionary where each field is a
215 key. The mandatory first argument is the filename of the .changes
218 signing_rules is an optional argument:
220 - If signing_rules == -1, no signature is required.
221 - If signing_rules == 0 (the default), a signature is required.
222 - If signing_rules == 1, it turns on the same strict format checking
225 The rules for (signing_rules == 1)-mode are:
227 - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
228 followed by any PGP header data and must end with a blank line.
230 - The data section must end with a blank line and must be followed by
231 "-----BEGIN PGP SIGNATURE-----".
234 changes_in = open_file(filename)
235 content = changes_in.read()
# Probe that the content is valid UTF-8 (py2 unicode()); the
# surrounding try/except UnicodeError is elided in this excerpt.
238 unicode(content, 'utf-8')
240 raise ChangesUnicodeError, "Changes file not proper utf-8"
241 return parse_deb822(content, signing_rules)
243 ################################################################################
def hash_key(hashname):
    """Return the files-dict key used to store a checksum of the
    given type, e.g. 'sha1' -> 'sha1sum'."""
    return hashname + "sum"
248 ################################################################################
250 def create_hash(where, files, hashname, hashfunc):
# (Docstring quotes, rejmsg initialisation, the try:, and the final
# return of rejmsg are elided in this excerpt.)
252 create_hash extends the passed files dict with the given hash by
253 iterating over all files on disk and passing them to the hashing
258 for f in files.keys():
# Unopenable files become reject messages instead of aborting.
260 file_handle = open_file(f)
261 except CantOpenError:
262 rejmsg.append("Could not open file %s for checksumming" % (f))
# hashfunc is an apt_pkg checksum function taking a file object.
264 files[f][hash_key(hashname)] = hashfunc(file_handle)
269 ################################################################################
271 def check_hash(where, files, hashname, hashfunc):
# (Docstring quotes, rejmsg initialisation, the try:, file close and
# the return are elided in this excerpt.)
273 check_hash checks the given hash in the files dict against the actual
274 files on disk. The hash values need to be present consistently in
275 all file entries. It does not modify its input in any way.
279 for f in files.keys():
283 file_handle = open_file(f)
285 # Check for the hash entry, to not trigger a KeyError.
286 if not files[f].has_key(hash_key(hashname)):
287 rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
291 # Actually check the hash for correctness.
292 if hashfunc(file_handle) != files[f][hash_key(hashname)]:
293 rejmsg.append("%s: %s check failed in %s" % (f, hashname,
295 except CantOpenError:
296 # TODO: This happens when the file is in the pool.
297 # warn("Cannot open file %s" % f)
304 ################################################################################
306 def check_size(where, files):
# (Docstring quotes, the os.stat call, its exception handling, and
# the rejmsg init/return are elided in this excerpt.)
308 check_size checks the file sizes in the passed files dict against the
313 for f in files.keys():
318 # TODO: This happens when the file is in the pool.
# `entry` is presumably the os.stat result — TODO confirm against
# the elided lines.
322 actual_size = entry[stat.ST_SIZE]
323 size = int(files[f]["size"])
324 if size != actual_size:
325 rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
326 % (f, actual_size, size, where))
329 ################################################################################
331 def check_hash_fields(what, manifest):
# Reject any checksums-* field whose hash type is not in
# known_hashes. (Docstring quotes, rejmsg init and return elided.)
333 check_hash_fields ensures that there are no checksum fields in the
334 given dict that we do not know about.
338 hashes = map(lambda x: x[0], known_hashes)
339 for field in manifest:
340 if field.startswith("checksums-"):
341 hashname = field.split("-",1)[1]
342 if hashname not in hashes:
343 rejmsg.append("Unsupported checksum field for %s "\
344 "in %s" % (hashname, what))
347 ################################################################################
349 def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
# Depending on the .changes Format version, either import the given
# checksum field from the changes or (for older formats) compute it
# from the files on disk. (Several branch lines are elided here.)
350 if format >= version:
351 # The version should contain the specified hash.
354 # Import hashes from the changes
355 rejmsg = parse_checksums(".changes", files, changes, hashname)
359 # We need to calculate the hash because it can't possibly
# `func` is bound in elided lines — presumably create_hash; confirm.
362 return func(".changes", files, hashname, hashfunc)
364 # We could add the orig which might be in the pool to the files dict to
365 # access the checksums easily.
367 def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
# (Docstring quotes, rejmsg init/early return, and the final return
# are elided in this excerpt.)
369 ensure_dsc_hashes' task is to ensure that each and every *present* hash
370 in the dsc is correct, i.e. identical to the changes file and if necessary
371 the pool. The latter task is delegated to check_hash.
# Absent Checksums-<hash> fields are not an error for the .dsc.
375 if not dsc.has_key('Checksums-%s' % (hashname,)):
377 # Import hashes from the dsc
378 parse_checksums(".dsc", dsc_files, dsc, hashname)
380 rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
383 ################################################################################
385 def ensure_hashes(changes, dsc, files, dsc_files):
# Validate/import every known checksum type for both the .changes
# and (for source uploads) the .dsc. (rejmsg init, the try/except
# around the Format parse, and the final return are elided here.)
388 # Make sure we recognise the format of the Files: field in the .changes
389 format = changes.get("format", "0.0").split(".", 1)
391 format = int(format[0]), int(format[1])
# Fallback path when the minor part does not parse as int.
393 format = int(float(format[0])), 0
395 # We need to deal with the original changes blob, as the fields we need
396 # might not be in the changes dict serialised into the .dak anymore.
397 orig_changes = parse_deb822(changes['filecontents'])
399 # Copy the checksums over to the current changes dict. This will keep
400 # the existing modifications to it intact.
401 for field in orig_changes:
402 if field.startswith('checksums-'):
403 changes[field] = orig_changes[field]
405 # Check for unsupported hashes
406 rejmsg.extend(check_hash_fields(".changes", changes))
407 rejmsg.extend(check_hash_fields(".dsc", dsc))
409 # We have to calculate the hash if we have an earlier changes version than
410 # the hash appears in rather than require it exist in the changes file
411 for hashname, hashfunc, version in known_hashes:
412 rejmsg.extend(_ensure_changes_hash(changes, format, version, files,
414 if "source" in changes["architecture"]:
415 rejmsg.extend(_ensure_dsc_hash(dsc, dsc_files, hashname,
420 def parse_checksums(where, files, manifest, hashname):
# Import a Checksums-<hash> field from `manifest` into `files`,
# cross-checking sizes, and reject files lacking an entry.
# (rejmsg init, empty-line handling, the `continue`, and the return
# are elided in this excerpt.)
422 field = 'checksums-%s' % hashname
423 if not field in manifest:
425 for line in manifest[field].split('\n'):
428 checksum, size, checkfile = line.strip().split(' ')
429 if not files.has_key(checkfile):
430 # TODO: check for the file's entry in the original files dict, not
431 # the one modified by (auto)byhand and other weird stuff
432 # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
433 # (file, hashname, where))
# NOTE: `size` is a string from split(); files[...]["size"] must be
# a string too for this comparison to behave — TODO confirm.
435 if not files[checkfile]["size"] == size:
436 rejmsg.append("%s: size differs for files and checksums-%s entry "\
437 "in %s" % (checkfile, hashname, where))
439 files[checkfile][hash_key(hashname)] = checksum
440 for f in files.keys():
441 if not files[f].has_key(hash_key(hashname)):
# BUG(review): this interpolates `checkfile` (the last line parsed
# in the loop above) instead of `f`, so the wrong filename is
# reported — and it raises NameError when the field had no entries.
# Should almost certainly be `f`.
442 rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
446 ################################################################################
448 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
450 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
# Parse a Files:/Checksums-* field into a files dict keyed by
# filename. (The `files = {}` init, the is_a_dsc branch structure
# around the format checks, and several loop lines are elided here.)
453 # Make sure we have a Files: field to parse...
454 if not changes.has_key(field):
455 raise NoFilesFieldError
457 # Make sure we recognise the format of the Files: field
458 format = re_verwithext.search(changes.get("format", "0.0"))
460 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
462 format = format.groups()
463 if format[1] == None:
464 format = int(float(format[0])), 0, format[2]
466 format = int(format[0]), int(format[1]), format[2]
467 if format[2] == None:
# NOTE(review): the two checks below presumably live in different
# (elided) is_a_dsc branches — .dsc accepts (1,0)/(0,0), .changes
# accepts (1,5)..(1,8) — confirm against the full source.
471 # format = (1,0) are the only formats we currently accept,
472 # format = (0,0) are missing format headers of which we still
473 # have some in the archive.
474 if format != (1,0) and format != (0,0):
475 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
477 if (format < (1,5) or format > (1,8)):
478 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
479 if field != "files" and format < (1,8):
480 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
482 includes_section = (not is_a_dsc) and field == "files"
484 # Parse each entry/line:
485 for i in changes[field].split('\n'):
489 section = priority = ""
# 5-tuple lines carry section/priority; 3-tuple lines do not.
492 (md5, size, section, priority, name) = s
494 (md5, size, name) = s
496 raise ParseChangesError, i
503 (section, component) = extract_component_from_section(section)
505 files[name] = Dict(size=size, section=section,
506 priority=priority, component=component)
507 files[name][hashname] = md5
511 ################################################################################
513 def force_to_utf8(s):
# (Docstring quotes and the try/except that returns `s` unchanged
# when it already decodes as UTF-8 are elided in this excerpt.)
515 Forces a string to UTF-8. If the string isn't already UTF-8,
516 it's assumed to be ISO-8859-1.
522 latin1_s = unicode(s,'iso8859-1')
523 return latin1_s.encode('utf-8')
525 def rfc2047_encode(s):
# (Docstring quotes and the try/except ladder are elided: try ASCII
# first, then UTF-8, finally fall back to ISO-8859-1.)
527 Encodes a (header) string per RFC2047 if necessary. If the
528 string is neither ASCII nor UTF-8, it's assumed to be ISO-8859-1.
# codecs.lookup(...)[1] is the codec's decode function, used purely
# to probe whether `s` decodes cleanly in that charset.
531 codecs.lookup('ascii')[1](s)
536 codecs.lookup('utf-8')[1](s)
537 h = email.Header.Header(s, 'utf-8', 998)
540 h = email.Header.Header(s, 'iso-8859-1', 998)
543 ################################################################################
545 # <Culus> 'The standard sucks, but my tool is supposed to interoperate
546 # with it. I know - I'll fix the suckage and make things
549 def fix_maintainer (maintainer):
# (Docstring quotes and several branch lines — including the
# name/email extraction assignments — are elided in this excerpt.)
551 Parses a Maintainer or Changed-By field and returns:
552 1. an RFC822 compatible version,
553 2. an RFC2047 compatible version,
557 The name is forced to UTF-8 for both 1. and 3.. If the name field
558 contains '.' or ',' (as allowed by Debian policy), 1. and 2. are
559 switched to 'email (name)' format.
562 maintainer = maintainer.strip()
# Empty input yields four empty strings.
564 return ('', '', '', '')
566 if maintainer.find("<") == -1:
569 elif (maintainer[0] == "<" and maintainer[-1:] == ">"):
570 email = maintainer[1:-1]
573 m = re_parse_maintainer.match(maintainer)
575 raise ParseMaintError, "Doesn't parse as a valid Maintainer field."
579 # Get an RFC2047 compliant version of the name
580 rfc2047_name = rfc2047_encode(name)
582 # Force the name to be UTF-8
583 name = force_to_utf8(name)
# Policy-driven switch: names containing ',' or '.' are rendered as
# "email (name)" to stay parseable.
585 if name.find(',') != -1 or name.find('.') != -1:
586 rfc822_maint = "%s (%s)" % (email, name)
587 rfc2047_maint = "%s (%s)" % (email, rfc2047_name)
589 rfc822_maint = "%s <%s>" % (name, email)
590 rfc2047_maint = "%s <%s>" % (rfc2047_name, email)
# buildd_* pseudo-addresses are allowed to lack an '@'.
592 if email.find("@") == -1 and email.find("buildd_") != 0:
593 raise ParseMaintError, "No @ found in email address part."
595 return (rfc822_maint, rfc2047_maint, name, email)
597 ################################################################################
599 def send_mail (message, filename=""):
600 """sendmail wrapper, takes _either_ a message string or a file as arguments"""
# Many lines are elided in this excerpt (the `if message:` guards,
# os.close calls, the whitelist/match bookkeeping); comments only
# annotate what is visible.
602 # If we've been passed a string dump it into a temporary file
604 (fd, filename) = tempfile.mkstemp()
605 os.write (fd, message)
# Optional whitelist filtering of To/Bcc/Cc recipients.
608 if Cnf.has_key("Dinstall::MailWhiteList") and \
609 Cnf["Dinstall::MailWhiteList"] != "":
610 message_in = open_file(filename)
611 message_raw = modemail.message_from_file(message_in)
615 whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
617 for line in whitelist_in:
# Lines starting with the RE marker are taken as regexes, other
# lines are matched literally.
618 if re_re_mark.match(line):
619 whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
621 whitelist.append(re.compile(re.escape(line.strip())))
626 fields = ["To", "Bcc", "Cc"]
629 value = message_raw.get(field, None)
632 for item in value.split(","):
633 (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
639 if not mail_whitelisted:
640 print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])
644 # Doesn't have any mail in whitelist so remove the header
646 del message_raw[field]
648 message_raw.replace_header(field, string.join(match, ", "))
650 # Change message fields in order if we don't have a To header
651 if not message_raw.has_key("To"):
654 if message_raw.has_key(field):
655 message_raw[fields[-1]] = message_raw[field]
656 del message_raw[field]
659 # Clean up any temporary files
660 # and return, as we removed all recipients.
662 os.unlink (filename);
# NOTE(review): O_EXCL without O_CREAT has no effect when reopening
# an existing temp file — confirm the intent here.
665 fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
666 os.write (fd, message_raw.as_string(True));
# Pipe the (possibly rewritten) message through sendmail.
670 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
672 raise SendmailFailedError, output
674 # Clean up any temporary files
678 ################################################################################
680 def poolify (source, component):
# Map a source package to its pool subdirectory: 'lib*' packages go
# under lib<X>/, others under their first letter. The component
# normalisation lines are elided in this excerpt.
683 if source[:3] == "lib":
684 return component + source[:4] + '/' + source + '/'
686 return component + source[:1] + '/' + source + '/'
688 ################################################################################
690 def move (src, dest, overwrite = 0, perms = 0664):
# Copy src to dest (creating dest dirs mode 02775 if needed), chmod,
# then remove src. The os.unlink/umask-restore lines and the
# `if not overwrite:` guard are elided in this excerpt.
# NOTE(review): near-duplicate of copy() below; differs in aborting
# via fubar() instead of raising, plus the final unlink.
691 if os.path.exists(dest) and os.path.isdir(dest):
694 dest_dir = os.path.dirname(dest)
695 if not os.path.exists(dest_dir):
696 umask = os.umask(00000)
697 os.makedirs(dest_dir, 02775)
699 #print "Moving %s to %s..." % (src, dest)
700 if os.path.exists(dest) and os.path.isdir(dest):
701 dest += '/' + os.path.basename(src)
702 # Don't overwrite unless forced to
703 if os.path.exists(dest):
705 fubar("Can't move %s to %s - file already exists." % (src, dest))
707 if not os.access(dest, os.W_OK):
708 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
709 shutil.copy2(src, dest)
710 os.chmod(dest, perms)
713 def copy (src, dest, overwrite = 0, perms = 0664):
# Copy src to dest (creating dest dirs mode 02775 if needed) and
# chmod. Raises FileExistsError/CantOverwriteError instead of
# aborting. The `if not overwrite:` guard and umask-restore lines
# are elided in this excerpt.
714 if os.path.exists(dest) and os.path.isdir(dest):
717 dest_dir = os.path.dirname(dest)
718 if not os.path.exists(dest_dir):
719 umask = os.umask(00000)
720 os.makedirs(dest_dir, 02775)
722 #print "Copying %s to %s..." % (src, dest)
723 if os.path.exists(dest) and os.path.isdir(dest):
724 dest += '/' + os.path.basename(src)
725 # Don't overwrite unless forced to
726 if os.path.exists(dest):
728 raise FileExistsError
730 if not os.access(dest, os.W_OK):
731 raise CantOverwriteError
732 shutil.copy2(src, dest)
733 os.chmod(dest, perms)
735 ################################################################################
738 res = socket.gethostbyaddr(socket.gethostname())
739 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
740 if database_hostname:
741 return database_hostname
745 def which_conf_file ():
# Per-host dak config lookup keyed on this machine's FQDN, falling
# back to the module default. (An `else:` line may be elided.)
746 res = socket.gethostbyaddr(socket.gethostname())
747 if Cnf.get("Config::" + res[0] + "::DakConfig"):
748 return Cnf["Config::" + res[0] + "::DakConfig"]
750 return default_config
752 def which_apt_conf_file ():
# Per-host apt config lookup keyed on this machine's FQDN, falling
# back to the module default. (An `else:` line may be elided.)
753 res = socket.gethostbyaddr(socket.gethostname())
754 if Cnf.get("Config::" + res[0] + "::AptConfig"):
755 return Cnf["Config::" + res[0] + "::AptConfig"]
757 return default_apt_config
759 def which_alias_file():
# Host-specific forward-alias file under /var/lib/misc; the return
# of the path (and the None fallback) is elided in this excerpt.
760 hostname = socket.gethostbyaddr(socket.gethostname())[0]
761 aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
762 if os.path.exists(aliasfn):
767 ################################################################################
769 # Escape characters which have meaning to SQL's regex comparison operator ('~')
770 # (woefully incomplete)
773 s = s.replace('+', '\\\\+')
774 s = s.replace('.', '\\\\.')
777 ################################################################################
779 def TemplateSubst(map, filename):
780 """ Perform a substition of template """
# Read the template file and replace each key of `map` with its
# value (the loop header and return are elided in this excerpt).
781 templatefile = open_file(filename)
782 template = templatefile.read()
784 template = template.replace(x,map[x])
788 ################################################################################
790 def fubar(msg, exit_code=1):
# Print "E: <msg>" to stderr and terminate (the sys.exit(exit_code)
# line is elided in this excerpt).
791 sys.stderr.write("E: %s\n" % (msg))
795 sys.stderr.write("W: %s\n" % (msg))
797 ################################################################################
799 # Returns the user name with a laughable attempt at rfc822 conformancy
800 # (read: removing stray periods).
802 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
804 ################################################################################
814 return ("%d%s" % (c, t))
816 ################################################################################
818 def cc_fix_changes (changes):
# Canonicalise the architecture field from a string into a dict of
# arch -> 1 (the split() loop header is elided in this excerpt).
819 o = changes.get("architecture", "")
821 del changes["architecture"]
822 changes["architecture"] = {}
824 changes["architecture"][j] = 1
826 def changes_compare (a, b):
827 """ Sort by source name, source version, 'have source', and then by filename """
# Py2 cmp-style comparator over .changes filenames. Parse failures
# (except branches), the early `return q` lines and the final
# filename cmp are elided in this excerpt.
829 a_changes = parse_changes(a)
834 b_changes = parse_changes(b)
838 cc_fix_changes (a_changes)
839 cc_fix_changes (b_changes)
841 # Sort by source name
842 a_source = a_changes.get("source")
843 b_source = b_changes.get("source")
844 q = cmp (a_source, b_source)
848 # Sort by source version
849 a_version = a_changes.get("version", "0")
850 b_version = b_changes.get("version", "0")
851 q = apt_pkg.VersionCompare(a_version, b_version)
855 # Sort by 'have source'
856 a_has_source = a_changes["architecture"].get("source")
857 b_has_source = b_changes["architecture"].get("source")
# Source uploads sort after/before binary-only ones (returns elided).
858 if a_has_source and not b_has_source:
860 elif b_has_source and not a_has_source:
863 # Fall back to sort by filename
866 ################################################################################
868 def find_next_free (dest, too_many=100):
# Append '.<n>' to dest until an unused pathname is found; give up
# after too_many attempts. (Counter init/increment and the final
# `return dest` are elided in this excerpt.)
871 while os.path.exists(dest) and extra < too_many:
872 dest = orig_dest + '.' + repr(extra)
874 if extra >= too_many:
875 raise NoFreeFilenameError
878 ################################################################################
880 def result_join (original, sep = '\t'):
# Join a result sequence into a single string, mapping None to "".
# (The resultlist initialisation is elided in this excerpt.)
882 for i in xrange(len(original)):
883 if original[i] == None:
884 resultlist.append("")
886 resultlist.append(original[i])
887 return sep.join(resultlist)
889 ################################################################################
891 def prefix_multi_line_string(str, prefix, include_blank_lines=0):
# Prefix every (optionally also blank) line of `str`. The output
# accumulator init, any per-line strip, and the trailing-newline
# strip/return are elided in this excerpt.
893 for line in str.split('\n'):
895 if line or include_blank_lines:
896 out += "%s%s\n" % (prefix, line)
897 # Strip trailing new line
902 ################################################################################
904 def validate_changes_file_arg(filename, require_changes=1):
# (Docstring quotes, the `error` initialisation and the final
# returns are elided in this excerpt.)
906 'filename' is either a .changes or .dak file. If 'filename' is a
907 .dak file, it's changed to be the corresponding .changes file. The
908 function then checks if the .changes file a) exists and b) is
909 readable and returns the .changes filename if so. If there's a
910 problem, the next action depends on the option 'require_changes'
913 - If 'require_changes' == -1, errors are ignored and the .changes
914 filename is returned.
915 - If 'require_changes' == 0, a warning is given and 'None' is returned.
916 - If 'require_changes' == 1, a fatal error is raised.
921 orig_filename = filename
# Map a .dak argument onto its sibling .changes file.
922 if filename.endswith(".dak"):
923 filename = filename[:-4]+".changes"
925 if not filename.endswith(".changes"):
926 error = "invalid file type; not a changes file"
928 if not os.access(filename,os.R_OK):
929 if os.path.exists(filename):
930 error = "permission denied"
932 error = "file not found"
935 if require_changes == 1:
936 fubar("%s: %s." % (orig_filename, error))
937 elif require_changes == 0:
938 warn("Skipping %s - %s" % (orig_filename, error))
940 else: # We only care about the .dak file
945 ################################################################################
948 return (arch != "source" and arch != "all")
950 ################################################################################
def join_with_commas_and(list):
    """Render a list as English prose: 'a, b and c'.

    Empty lists come back as the word 'nothing'; single-element
    lists come back as that element unchanged."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    head = ", ".join(list[:-1])
    return head + " and " + list[-1]
957 ################################################################################
962 (pkg, version, constraint) = atom
964 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
967 pp_deps.append(pp_dep)
968 return " |".join(pp_deps)
970 ################################################################################
975 ################################################################################
977 def parse_args(Options):
978 """ Handle -a, -c and -s arguments; returns them as SQL constraints """
# Builds AND-clauses for suites, components and architectures from
# the option dict. (The con_* / list initialisations and several
# if/else framing lines are elided in this excerpt.)
982 for suite in split_args(Options["Suite"]):
983 suite_id = database.get_suite_id(suite)
985 warn("suite '%s' not recognised." % (suite))
987 suite_ids_list.append(suite_id)
989 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
991 fubar("No valid suite given.")
996 if Options["Component"]:
997 component_ids_list = []
998 for component in split_args(Options["Component"]):
999 component_id = database.get_component_id(component)
1000 if component_id == -1:
1001 warn("component '%s' not recognised." % (component))
1003 component_ids_list.append(component_id)
1004 if component_ids_list:
1005 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
1007 fubar("No valid component given.")
1011 # Process architecture
1012 con_architectures = ""
1013 if Options["Architecture"]:
1016 for architecture in split_args(Options["Architecture"]):
# "source" is signalled out-of-band via the check_source flag.
1017 if architecture == "source":
1020 architecture_id = database.get_architecture_id(architecture)
1021 if architecture_id == -1:
1022 warn("architecture '%s' not recognised." % (architecture))
1024 arch_ids_list.append(architecture_id)
1026 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
1028 if not check_source:
1029 fubar("No valid architecture given.")
1033 return (con_suites, con_architectures, con_components, check_source)
1035 ################################################################################
1037 # Inspired(tm) by Bryn Keller's print_exc_plus (See
1038 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
1041 tb = sys.exc_info()[2]
1048 frame = frame.f_back
1050 traceback.print_exc()
1052 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
1053 frame.f_code.co_filename,
1055 for key, value in frame.f_locals.items():
1056 print "\t%20s = " % key,
1060 print "<unable to print>"
1062 ################################################################################
1064 def try_with_debug(function):
1072 ################################################################################
1074 def arch_compare_sw (a, b):
# (Docstring quotes and the remaining comparison branches — the
# source-vs-other and plain cmp cases — are elided in this excerpt.)
1076 Function for use in sorting lists of architectures.
1078 Sorts normally except that 'source' dominates all others.
1081 if a == "source" and b == "source":
1090 ################################################################################
1092 def split_args (s, dwim=1):
# (Docstring quotes and the actual split()/return lines are elided
# in this excerpt.)
1094 Split command line arguments which can be separated by either commas
1095 or whitespace. If dwim is set, it will complain about string ending
1096 in comma since this usually means someone did 'dak ls -a i386, m68k
1097 foo' or something and the inevitable confusion resulting from 'm68k'
1098 being treated as an argument is undesirable.
# No comma: whitespace-separated (split elided); otherwise split on
# commas after the trailing-comma sanity check below.
1101 if s.find(",") == -1:
1104 if s[-1:] == "," and dwim:
1105 fubar("split_args: found trailing comma, spurious space maybe?")
1108 ################################################################################
def Dict(**kwargs):
    """Convenience constructor: Dict(a=1, b=2) -> {'a': 1, 'b': 2}."""
    return kwargs
1112 ########################################
1114 def gpgv_get_status_output(cmd, status_read, status_write):
# (Docstring quotes, the fork(), the child's stdin/stdout/stderr
# dup2 setup, and the fd close/cleanup tail are elided here.)
1116 Our very own version of commands.getouputstatus(), hacked to support
1120 cmd = ['/bin/sh', '-c', cmd]
1121 p2cread, p2cwrite = os.pipe()
1122 c2pread, c2pwrite = os.pipe()
1123 errout, errin = os.pipe()
# Child: close every inherited fd except the gpgv status pipe.
1133 for i in range(3, 256):
1134 if i != status_write:
1140 os.execvp(cmd[0], cmd)
# Parent: select-loop over the child's stdout, stderr and the
# status fd, accumulating into `output` and `status`.
1146 os.dup2(c2pread, c2pwrite)
1147 os.dup2(errout, errin)
1149 output = status = ""
1151 i, o, e = select.select([c2pwrite, errin, status_read], [], [])
# NOTE(review): 8196 looks like a typo for 8192 (harmless).
1154 r = os.read(fd, 8196)
1156 more_data.append(fd)
1157 if fd == c2pwrite or fd == errin:
1159 elif fd == status_read:
1162 fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
1164 pid, exit_status = os.waitpid(pid, 0)
1166 os.close(status_write)
1167 os.close(status_read)
1177 return output, status, exit_status
1179 ################################################################################
1181 def process_gpgv_output(status):
1182 # Process the status-fd output
# Parse "[GNUPG:] KEYWORD args..." status lines into a dict; any
# malformed line is accumulated into `internal_error`. (keywords /
# internal_error init and the `continue` lines are elided here.)
1185 for line in status.split('\n'):
1189 split = line.split()
1191 internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
1193 (gnupg, keyword) = split[:2]
1194 if gnupg != "[GNUPG:]":
1195 internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
# The listed tokens may legitimately repeat; any other duplicate
# keyword is an error.
1198 if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
1199 internal_error += "found duplicate status token ('%s').\n" % (keyword)
1202 keywords[keyword] = args
1204 return (keywords, internal_error)
1206 ################################################################################
1208 def retrieve_key (filename, keyserver=None, keyring=None):
# (Docstring quotes and the `if not keyserver:` / `if not keyring:`
# guard lines are elided in this excerpt.)
1210 Retrieve the key that signed 'filename' from 'keyserver' and
1211 add it to 'keyring'. Returns nothing on success, or an error message
1215 # Defaults for keyserver and keyring
1217 keyserver = Cnf["Dinstall::KeyServer"]
1219 keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
1221 # Ensure the filename contains no shell meta-characters or other badness
1222 if not re_taint_free.match(filename):
1223 return "%s: tainted filename" % (filename)
1225 # Invoke gpgv on the file
1226 status_read, status_write = os.pipe()
# /dev/null keyring guarantees the signature cannot verify, so the
# signer shows up as NO_PUBKEY with its fingerprint.
1227 cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1228 (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1230 # Process the status-fd output
1231 (keywords, internal_error) = process_gpgv_output(status)
1233 return internal_error
1235 if not keywords.has_key("NO_PUBKEY"):
1236 return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1238 fingerprint = keywords["NO_PUBKEY"][0]
1239 # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
1240 # it'll try to create a lockfile in /dev. A better solution might
1241 # be a tempfile or something.
1242 cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1243 % (Cnf["Dinstall::SigningKeyring"])
1244 cmd += " --keyring %s --keyserver %s --recv-key %s" \
1245 % (keyring, keyserver, fingerprint)
1246 (result, output) = commands.getstatusoutput(cmd)
1248 return "'%s' failed with exit code %s" % (cmd, result)
1252 ################################################################################
1254 def gpg_keyring_args(keyrings=None):
# Render keyrings as gpg '--keyring X' command-line arguments,
# defaulting to the configured Dinstall::GPGKeyring list (the
# `if not keyrings:` guard is elided in this excerpt).
1256 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1258 return " ".join(["--keyring %s" % x for x in keyrings])
1260 ################################################################################
def check_signature (sig_filename, reject, data_filename="", keyrings=None, autofetch=None):
    """
    Check the signature of a file and return the fingerprint if the
    signature is valid or 'None' if it's not.  The first argument is the
    filename whose signature should be checked.  The second argument is a
    reject function and is called when an error is found.  The reject()
    function must allow for two arguments: the first is the error message,
    the second is an optional prefix string.  It's possible for reject()
    to be called more than once during an invocation of check_signature().
    The third argument is optional and is the name of the files the
    detached signature applies to.  The fourth argument is optional and is
    a *list* of keyrings to use.  'autofetch' can either be None, True or
    False.  If None, the default behaviour specified in the config will be
    used.
    """
    # NOTE(review): several guard / early-return / try-except lines appear
    # elided from this excerpt (e.g. the returns after the taint rejects,
    # the "if internal_error:" test, and the KEYEXPIRED timestamp parsing);
    # verify against the full file before relying on the flow shown here.

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(sig_filename):
        reject("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))

    if data_filename and not re_taint_free.match(data_filename):
        reject("!!WARNING!! tainted data filename: '%s'." % (data_filename))

    # Fall back to the keyrings configured for the host.
    keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    # Autofetch the signing key if that's enabled
    if autofetch == None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    error_msg = retrieve_key(sig_filename)

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    reject("internal error while performing signature check on %s." % (sig_filename))
    reject(internal_error, "")
    reject("Please report the above errors to the Archive maintainers by replying to this mail.", "")

    # Now check for obviously bad things in the processed output
    if keywords.has_key("KEYREVOKED"):
        reject("The key used to sign %s has been revoked." % (sig_filename))
    if keywords.has_key("BADSIG"):
        reject("bad signature on %s." % (sig_filename))
    if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
        reject("failed to check signature on %s." % (sig_filename))
    if keywords.has_key("NO_PUBKEY"):
        args = keywords["NO_PUBKEY"]
        # 'key' is extracted from 'args' on a line elided from this excerpt.
        reject("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if keywords.has_key("BADARMOR"):
        reject("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if keywords.has_key("NODATA"):
        reject("no signature found in %s." % (sig_filename))
    if keywords.has_key("EXPKEYSIG"):
        args = keywords["EXPKEYSIG"]
        reject("Signature made by expired key 0x%s" % (key))
    if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
        args = keywords["KEYEXPIRED"]
        # 'timestamp' comes from 'args'; a plain epoch value (no "T") is
        # formatted as YYYY-MM-DD, with fallbacks for unparseable input.
        if timestamp.count("T") == 0:
            expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
            expiredate = "unknown (%s)" % (timestamp)
            expiredate = timestamp
        reject("The key used to sign %s has expired on %s" % (sig_filename, expiredate))

    # Next check gpgv exited with a zero return code
    reject("gpgv failed while checking %s." % (sig_filename))
    reject(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
    reject(prefix_multi_line_string(output, " [GPG output:] "), "")

    # Sanity check the good stuff we expect
    if not keywords.has_key("VALIDSIG"):
        reject("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
    args = keywords["VALIDSIG"]
    reject("internal error while checking signature on %s." % (sig_filename))
    # First VALIDSIG argument is the key fingerprint.
    fingerprint = args[0]
    if not keywords.has_key("GOODSIG"):
        reject("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if not keywords.has_key("SIG_ID"):
        reject("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

    # Finally ensure there's not something we don't recognise
    known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
                          NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")
    for keyword in keywords.keys():
        if not known_keywords.has_key(keyword):
            reject("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
1406 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """retrieve email addresses from gpg key uids for a given fingerprint"""
    # Serve repeated lookups from the module-level cache.
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses != None:
    # NOTE(review): the cache-hit return, the set() initialisation and the
    # exit-status / match guards appear elided from this excerpt -- confirm
    # against the full file.
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
          % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    # Collect the address captured by re_gpg_uid from each uid line.
    for l in output.split('\n'):
        m = re_gpg_uid.match(l)
        addresses.add(m.group(1))
    # Remember the result for subsequent calls.
    key_uid_email_cache[fingerprint] = addresses
1425 ################################################################################
1427 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """
    Word-wrap 'paragraph' so that output lines do not exceed 'max_length'
    characters; each continuation line is preceded by 'prefix'.

    (The loop header and the function tail are elided from this excerpt;
    'word', 'line' and 's' are bound on lines not shown here.)
    """
    # Split on whitespace and re-flow word by word.
    words = paragraph.split()
        word_size = len(word)
        # A single word longer than the limit gets a line of its own.
        if word_size > max_length:
            s += line + '\n' + prefix
            s += word + '\n' + prefix
        # +1 accounts for the separating space.
        new_length = len(line) + word_size + 1
        if new_length > max_length:
            s += line + '\n' + prefix
1458 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.

    Both paths have the leading 'root' prefix stripped (first occurrence
    only); the result is 'src' prefixed with one '../' per path component
    of the directory containing 'dest'.
    """
    # Strip the root prefix from both endpoints of the link.
    rel_src = src.replace(root, '', 1)
    rel_dest_dir = os.path.dirname(dest.replace(root, '', 1))
    # Climb out of the destination's directory, one '../' per component.
    ups = '../' * len(rel_dest_dir.split('/'))
    return ups + rel_src
1471 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.

    If 'directory' is non-null, it will be the directory the file is
    pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it;
    the default is "dak".
    If 'suffix' is non-null, the filename will end with it.

    Returns a pair (fd, name) as produced by tempfile.mkstemp; the caller
    is responsible for closing the descriptor and removing the file.
    """
    (fd, name) = tempfile.mkstemp(suffix, prefix, directory)
    return (fd, name)
1485 ################################################################################
def is_email_alias(email):
    """ checks if the user part of the email is listed in the alias file """
    # Populate the module-level cache from the alias file on first use.
    # NOTE(review): the 'global alias_cache' declaration, the set()
    # initialisation and the "if aliasfn:" guard appear elided from this
    # excerpt -- confirm against the full file.
    if alias_cache == None:
        aliasfn = which_alias_file()
        # Alias-file lines are "name: target"; cache the name before ':'.
        for l in open(aliasfn):
            alias_cache.add(l.split(':')[0])
    # Compare only the local part (before '@') against the cached names.
    uid = email.split('@')[0]
    return uid in alias_cache
1499 ################################################################################
def get_changes_files(dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place)

    Returns a list of filenames
    """
    # Much of the rest of p-u/p-a depends on being in the right place
    changes_files = [x for x in os.listdir(dir) if x.endswith('.changes')]
    # NOTE(review): 'e' suggests this fubar() call belongs to an elided
    # "except OSError" handler wrapping the listing above -- confirm
    # against the full file.
    fubar("Failed to read list from directory %s (%s)" % (dir, e))
    return changes_files
1518 ################################################################################
# Module import-time bootstrap: build the global apt_pkg configuration
# object and load the default dak configuration into it.
Cnf = apt_pkg.newConfiguration()
apt_pkg.ReadConfigFileISC(Cnf,default_config)

# Overlay the host-specific configuration when which_conf_file() points
# somewhere other than the default (values read later override earlier ones).
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1528 ################################################################################