2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
42 import email as modemail
43 from dak_exceptions import *
44 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
45 re_multi_line_field, re_srchasver, re_verwithext, \
46 re_parse_maintainer, re_taint_free, re_gpg_uid, re_re_mark
48 ################################################################################
50 default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
51 default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used
53 alias_cache = None #: Cache for email alias checks
54 key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
56 # (hashname, function, earliest_changes_version)
57 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
58 ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
60 ################################################################################
63 """ Escape html chars """
64 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
66 ################################################################################
68 def open_file(filename, mode='r'):
70 Open C{file}, return fileobject.
72 @type filename: string
73 @param filename: path/filename to open
76 @param mode: open mode
79 @return: open fileobject
81 @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
85 f = open(filename, mode)
87 raise CantOpenError, filename
90 ################################################################################
92 def our_raw_input(prompt=""):
94 sys.stdout.write(prompt)
100 sys.stderr.write("\nUser interrupt (^D).\n")
103 ################################################################################
105 def extract_component_from_section(section):
108 if section.find('/') != -1:
109 component = section.split('/')[0]
111 # Expand default component
113 if Cnf.has_key("Component::%s" % section):
118 return (section, component)
120 ################################################################################
122 def parse_deb822(contents, signing_rules=0):
126 # Split the lines in the input, keeping the linebreaks.
127 lines = contents.splitlines(True)
130 raise ParseChangesError, "[Empty changes file]"
132 # Reindex by line number so we can easily verify the format of
138 indexed_lines[index] = line[:-1]
142 num_of_lines = len(indexed_lines.keys())
145 while index < num_of_lines:
147 line = indexed_lines[index]
149 if signing_rules == 1:
151 if index > num_of_lines:
152 raise InvalidDscError, index
153 line = indexed_lines[index]
154 if not line.startswith("-----BEGIN PGP SIGNATURE"):
155 raise InvalidDscError, index
160 if line.startswith("-----BEGIN PGP SIGNATURE"):
162 if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
164 if signing_rules == 1:
165 while index < num_of_lines and line != "":
167 line = indexed_lines[index]
169 # If we're not inside the signed data, don't process anything
170 if signing_rules >= 0 and not inside_signature:
172 slf = re_single_line_field.match(line)
174 field = slf.groups()[0].lower()
175 changes[field] = slf.groups()[1]
179 changes[field] += '\n'
181 mlf = re_multi_line_field.match(line)
184 raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
185 if first == 1 and changes[field] != "":
186 changes[field] += '\n'
188 changes[field] += mlf.groups()[0] + '\n'
192 if signing_rules == 1 and inside_signature:
193 raise InvalidDscError, index
195 changes["filecontents"] = "".join(lines)
197 if changes.has_key("source"):
198 # Strip the source version in brackets from the source field,
199 # put it in the "source-version" field instead.
200 srcver = re_srchasver.search(changes["source"])
202 changes["source"] = srcver.group(1)
203 changes["source-version"] = srcver.group(2)
206 raise ParseChangesError, error
210 ################################################################################
212 def parse_changes(filename, signing_rules=0):
214 Parses a changes file and returns a dictionary where each field is a
215 key. The mandatory first argument is the filename of the .changes
218 signing_rules is an optional argument:
220 - If signing_rules == -1, no signature is required.
221 - If signing_rules == 0 (the default), a signature is required.
222 - If signing_rules == 1, it turns on the same strict format checking
225 The rules for (signing_rules == 1)-mode are:
227 - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
228 followed by any PGP header data and must end with a blank line.
230 - The data section must end with a blank line and must be followed by
231 "-----BEGIN PGP SIGNATURE-----".
234 changes_in = open_file(filename)
235 content = changes_in.read()
238 unicode(content, 'utf-8')
240 raise ChangesUnicodeError, "Changes file not proper utf-8"
241 return parse_deb822(content, signing_rules)
243 ################################################################################
def hash_key(hashname):
    """Return the files-dict key used to store the given hash,
    e.g. "md5" -> "md5sum"."""
    return hashname + "sum"
248 ################################################################################
250 def create_hash(where, files, hashname, hashfunc):
252 create_hash extends the passed files dict with the given hash by
253 iterating over all files on disk and passing them to the hashing
258 for f in files.keys():
260 file_handle = open_file(f)
261 except CantOpenError:
262 rejmsg.append("Could not open file %s for checksumming" % (f))
265 files[f][hash_key(hashname)] = hashfunc(file_handle)
270 ################################################################################
272 def check_hash(where, files, hashname, hashfunc):
274 check_hash checks the given hash in the files dict against the actual
275 files on disk. The hash values need to be present consistently in
276 all file entries. It does not modify its input in any way.
280 for f in files.keys():
284 file_handle = open_file(f)
286 # Check for the hash entry, to not trigger a KeyError.
287 if not files[f].has_key(hash_key(hashname)):
288 rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
292 # Actually check the hash for correctness.
293 if hashfunc(file_handle) != files[f][hash_key(hashname)]:
294 rejmsg.append("%s: %s check failed in %s" % (f, hashname,
296 except CantOpenError:
297 # TODO: This happens when the file is in the pool.
298 # warn("Cannot open file %s" % f)
305 ################################################################################
307 def check_size(where, files):
309 check_size checks the file sizes in the passed files dict against the
314 for f in files.keys():
319 # TODO: This happens when the file is in the pool.
323 actual_size = entry[stat.ST_SIZE]
324 size = int(files[f]["size"])
325 if size != actual_size:
326 rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
327 % (f, actual_size, size, where))
330 ################################################################################
332 def check_hash_fields(what, manifest):
334 check_hash_fields ensures that there are no checksum fields in the
335 given dict that we do not know about.
339 hashes = map(lambda x: x[0], known_hashes)
340 for field in manifest:
341 if field.startswith("checksums-"):
342 hashname = field.split("-",1)[1]
343 if hashname not in hashes:
344 rejmsg.append("Unsupported checksum field for %s "\
345 "in %s" % (hashname, what))
348 ################################################################################
350 def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
351 if format >= version:
352 # The version should contain the specified hash.
355 # Import hashes from the changes
356 rejmsg = parse_checksums(".changes", files, changes, hashname)
360 # We need to calculate the hash because it can't possibly
363 return func(".changes", files, hashname, hashfunc)
365 # We could add the orig which might be in the pool to the files dict to
366 # access the checksums easily.
368 def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
370 ensure_dsc_hashes' task is to ensure that each and every *present* hash
371 in the dsc is correct, i.e. identical to the changes file and if necessary
372 the pool. The latter task is delegated to check_hash.
376 if not dsc.has_key('Checksums-%s' % (hashname,)):
378 # Import hashes from the dsc
379 parse_checksums(".dsc", dsc_files, dsc, hashname)
381 rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
384 ################################################################################
386 def ensure_hashes(changes, dsc, files, dsc_files):
389 # Make sure we recognise the format of the Files: field in the .changes
390 format = changes.get("format", "0.0").split(".", 1)
392 format = int(format[0]), int(format[1])
394 format = int(float(format[0])), 0
396 # We need to deal with the original changes blob, as the fields we need
397 # might not be in the changes dict serialised into the .dak anymore.
398 orig_changes = parse_deb822(changes['filecontents'])
400 # Copy the checksums over to the current changes dict. This will keep
401 # the existing modifications to it intact.
402 for field in orig_changes:
403 if field.startswith('checksums-'):
404 changes[field] = orig_changes[field]
406 # Check for unsupported hashes
407 rejmsg.extend(check_hash_fields(".changes", changes))
408 rejmsg.extend(check_hash_fields(".dsc", dsc))
410 # We have to calculate the hash if we have an earlier changes version than
411 # the hash appears in rather than require it exist in the changes file
412 for hashname, hashfunc, version in known_hashes:
413 rejmsg.extend(_ensure_changes_hash(changes, format, version, files,
415 if "source" in changes["architecture"]:
416 rejmsg.extend(_ensure_dsc_hash(dsc, dsc_files, hashname,
421 def parse_checksums(where, files, manifest, hashname):
423 field = 'checksums-%s' % hashname
424 if not field in manifest:
426 for line in manifest[field].split('\n'):
429 checksum, size, checkfile = line.strip().split(' ')
430 if not files.has_key(checkfile):
431 # TODO: check for the file's entry in the original files dict, not
432 # the one modified by (auto)byhand and other weird stuff
433 # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
434 # (file, hashname, where))
436 if not files[checkfile]["size"] == size:
437 rejmsg.append("%s: size differs for files and checksums-%s entry "\
438 "in %s" % (checkfile, hashname, where))
440 files[checkfile][hash_key(hashname)] = checksum
441 for f in files.keys():
442 if not files[f].has_key(hash_key(hashname)):
443 rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
447 ################################################################################
449 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
451 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
454 # Make sure we have a Files: field to parse...
455 if not changes.has_key(field):
456 raise NoFilesFieldError
458 # Make sure we recognise the format of the Files: field
459 format = re_verwithext.search(changes.get("format", "0.0"))
461 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
463 format = format.groups()
464 if format[1] == None:
465 format = int(float(format[0])), 0, format[2]
467 format = int(format[0]), int(format[1]), format[2]
468 if format[2] == None:
472 # format = (1,0) are the only formats we currently accept,
473 # format = (0,0) are missing format headers of which we still
474 # have some in the archive.
475 if format != (1,0) and format != (0,0):
476 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
478 if (format < (1,5) or format > (1,8)):
479 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
480 if field != "files" and format < (1,8):
481 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
483 includes_section = (not is_a_dsc) and field == "files"
485 # Parse each entry/line:
486 for i in changes[field].split('\n'):
490 section = priority = ""
493 (md5, size, section, priority, name) = s
495 (md5, size, name) = s
497 raise ParseChangesError, i
504 (section, component) = extract_component_from_section(section)
506 files[name] = Dict(size=size, section=section,
507 priority=priority, component=component)
508 files[name][hashname] = md5
512 ################################################################################
514 def force_to_utf8(s):
516 Forces a string to UTF-8. If the string isn't already UTF-8,
517 it's assumed to be ISO-8859-1.
523 latin1_s = unicode(s,'iso8859-1')
524 return latin1_s.encode('utf-8')
526 def rfc2047_encode(s):
528 Encodes a (header) string per RFC2047 if necessary. If the
529 string is neither ASCII nor UTF-8, it's assumed to be ISO-8859-1.
532 codecs.lookup('ascii')[1](s)
537 codecs.lookup('utf-8')[1](s)
538 h = email.Header.Header(s, 'utf-8', 998)
541 h = email.Header.Header(s, 'iso-8859-1', 998)
544 ################################################################################
546 # <Culus> 'The standard sucks, but my tool is supposed to interoperate
547 # with it. I know - I'll fix the suckage and make things
550 def fix_maintainer (maintainer):
552 Parses a Maintainer or Changed-By field and returns:
553 1. an RFC822 compatible version,
554 2. an RFC2047 compatible version,
558 The name is forced to UTF-8 for both 1. and 3.. If the name field
559 contains '.' or ',' (as allowed by Debian policy), 1. and 2. are
560 switched to 'email (name)' format.
563 maintainer = maintainer.strip()
565 return ('', '', '', '')
567 if maintainer.find("<") == -1:
570 elif (maintainer[0] == "<" and maintainer[-1:] == ">"):
571 email = maintainer[1:-1]
574 m = re_parse_maintainer.match(maintainer)
576 raise ParseMaintError, "Doesn't parse as a valid Maintainer field."
580 # Get an RFC2047 compliant version of the name
581 rfc2047_name = rfc2047_encode(name)
583 # Force the name to be UTF-8
584 name = force_to_utf8(name)
586 if name.find(',') != -1 or name.find('.') != -1:
587 rfc822_maint = "%s (%s)" % (email, name)
588 rfc2047_maint = "%s (%s)" % (email, rfc2047_name)
590 rfc822_maint = "%s <%s>" % (name, email)
591 rfc2047_maint = "%s <%s>" % (rfc2047_name, email)
593 if email.find("@") == -1 and email.find("buildd_") != 0:
594 raise ParseMaintError, "No @ found in email address part."
596 return (rfc822_maint, rfc2047_maint, name, email)
598 ################################################################################
600 def send_mail (message, filename=""):
601 """sendmail wrapper, takes _either_ a message string or a file as arguments"""
603 # If we've been passed a string dump it into a temporary file
605 (fd, filename) = tempfile.mkstemp()
606 os.write (fd, message)
609 if Cnf.has_key("Dinstall::MailWhiteList") and \
610 Cnf["Dinstall::MailWhiteList"] != "":
611 message_in = open_file(filename)
612 message_raw = modemail.message_from_file(message_in)
616 whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
618 for line in whitelist_in:
619 if re_re_mark.match(line):
620 whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
622 whitelist.append(re.compile(re.escape(line.strip())))
627 fields = ["To", "Bcc", "Cc"]
630 value = message_raw.get(field, None)
633 for item in value.split(","):
634 (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
640 if not mail_whitelisted:
641 print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])
645 # Doesn't have any mail in whitelist so remove the header
647 del message_raw[field]
649 message_raw.replace_header(field, string.join(match, ", "))
651 # Change message fields in order if we don't have a To header
652 if not message_raw.has_key("To"):
655 if message_raw.has_key(field):
656 message_raw[fields[-1]] = message_raw[field]
657 del message_raw[field]
660 # Clean up any temporary files
661 # and return, as we removed all recipients.
663 os.unlink (filename);
666 fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
667 os.write (fd, message_raw.as_string(True));
671 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
673 raise SendmailFailedError, output
675 # Clean up any temporary files
679 ################################################################################
def poolify (source, component):
    """Return the pool subdirectory path for a source package.

    Regular packages are filed under their first letter, "lib*" packages
    under a four-character prefix, e.g.:
        ("bash", "main")   -> "main/b/bash/"
        ("libfoo", "main") -> "main/libf/libfoo/"
    An empty component yields a path with no component prefix.
    """
    if component:
        component += '/'
    # Library packages get the longer "libX" prefix directory.
    if source.startswith("lib"):
        prefix = source[:4]
    else:
        prefix = source[:1]
    return component + prefix + '/' + source + '/'
689 ################################################################################
691 def move (src, dest, overwrite = 0, perms = 0664):
692 if os.path.exists(dest) and os.path.isdir(dest):
695 dest_dir = os.path.dirname(dest)
696 if not os.path.exists(dest_dir):
697 umask = os.umask(00000)
698 os.makedirs(dest_dir, 02775)
700 #print "Moving %s to %s..." % (src, dest)
701 if os.path.exists(dest) and os.path.isdir(dest):
702 dest += '/' + os.path.basename(src)
703 # Don't overwrite unless forced to
704 if os.path.exists(dest):
706 fubar("Can't move %s to %s - file already exists." % (src, dest))
708 if not os.access(dest, os.W_OK):
709 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
710 shutil.copy2(src, dest)
711 os.chmod(dest, perms)
714 def copy (src, dest, overwrite = 0, perms = 0664):
715 if os.path.exists(dest) and os.path.isdir(dest):
718 dest_dir = os.path.dirname(dest)
719 if not os.path.exists(dest_dir):
720 umask = os.umask(00000)
721 os.makedirs(dest_dir, 02775)
723 #print "Copying %s to %s..." % (src, dest)
724 if os.path.exists(dest) and os.path.isdir(dest):
725 dest += '/' + os.path.basename(src)
726 # Don't overwrite unless forced to
727 if os.path.exists(dest):
729 raise FileExistsError
731 if not os.access(dest, os.W_OK):
732 raise CantOverwriteError
733 shutil.copy2(src, dest)
734 os.chmod(dest, perms)
736 ################################################################################
739 res = socket.gethostbyaddr(socket.gethostname())
740 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
741 if database_hostname:
742 return database_hostname
def which_conf_file ():
    """Return the host-specific dak config path from Cnf if one is set
    for this machine, otherwise the compiled-in default_config."""
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    key = "Config::" + hostname + "::DakConfig"
    if Cnf.get(key):
        return Cnf[key]
    return default_config
def which_apt_conf_file ():
    """Return the host-specific apt config path from Cnf if one is set
    for this machine, otherwise the compiled-in default_apt_config."""
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    key = "Config::" + hostname + "::AptConfig"
    if Cnf.get(key):
        return Cnf[key]
    return default_apt_config
760 def which_alias_file():
761 hostname = socket.gethostbyaddr(socket.gethostname())[0]
762 aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
763 if os.path.exists(aliasfn):
768 ################################################################################
770 # Escape characters which have meaning to SQL's regex comparison operator ('~')
771 # (woefully incomplete)
774 s = s.replace('+', '\\\\+')
775 s = s.replace('.', '\\\\.')
778 ################################################################################
780 def TemplateSubst(map, filename):
781 """ Perform a substition of template """
782 templatefile = open_file(filename)
783 template = templatefile.read()
785 template = template.replace(x,map[x])
789 ################################################################################
def fubar(msg, exit_code=1):
    """Write an error message to stderr and terminate the process
    with the given exit code."""
    sys.stderr.write("E: %s\n" % (msg))
    sys.exit(exit_code)

def warn(msg):
    """Write a warning message to stderr; execution continues."""
    sys.stderr.write("W: %s\n" % (msg))
798 ################################################################################
800 # Returns the user name with a laughable attempt at rfc822 conformancy
801 # (read: removing stray periods).
803 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
805 ################################################################################
815 return ("%d%s" % (c, t))
817 ################################################################################
819 def cc_fix_changes (changes):
820 o = changes.get("architecture", "")
822 del changes["architecture"]
823 changes["architecture"] = {}
825 changes["architecture"][j] = 1
827 def changes_compare (a, b):
828 """ Sort by source name, source version, 'have source', and then by filename """
830 a_changes = parse_changes(a)
835 b_changes = parse_changes(b)
839 cc_fix_changes (a_changes)
840 cc_fix_changes (b_changes)
842 # Sort by source name
843 a_source = a_changes.get("source")
844 b_source = b_changes.get("source")
845 q = cmp (a_source, b_source)
849 # Sort by source version
850 a_version = a_changes.get("version", "0")
851 b_version = b_changes.get("version", "0")
852 q = apt_pkg.VersionCompare(a_version, b_version)
856 # Sort by 'have source'
857 a_has_source = a_changes["architecture"].get("source")
858 b_has_source = b_changes["architecture"].get("source")
859 if a_has_source and not b_has_source:
861 elif b_has_source and not a_has_source:
864 # Fall back to sort by filename
867 ################################################################################
def find_next_free (dest, too_many=100):
    """Return 'dest' if no file exists there yet; otherwise try
    'dest.0', 'dest.1', ... and return the first free name.

    @raise NoFreeFilenameError: after 'too_many' occupied candidates.
    """
    base = dest
    attempt = 0
    while os.path.exists(dest) and attempt < too_many:
        dest = base + '.' + repr(attempt)
        attempt += 1
    if attempt >= too_many:
        raise NoFreeFilenameError
    return dest
879 ################################################################################
def result_join (original, sep = '\t'):
    """Join the values of 'original' with 'sep', substituting the empty
    string for None entries (handy for tab-separated report output).

    @type original: sequence
    @param original: values to join; None entries are rendered as ""

    @type sep: string
    @param sep: separator placed between entries

    @rtype: string
    @return: the joined string
    """
    # Replaced the old index loop ('for i in xrange(len(...))' with
    # '== None' tests) by a comprehension using an identity test.
    return sep.join("" if value is None else value for value in original)
890 ################################################################################
892 def prefix_multi_line_string(str, prefix, include_blank_lines=0):
894 for line in str.split('\n'):
896 if line or include_blank_lines:
897 out += "%s%s\n" % (prefix, line)
898 # Strip trailing new line
903 ################################################################################
905 def validate_changes_file_arg(filename, require_changes=1):
907 'filename' is either a .changes or .dak file. If 'filename' is a
908 .dak file, it's changed to be the corresponding .changes file. The
909 function then checks if the .changes file a) exists and b) is
910 readable and returns the .changes filename if so. If there's a
911 problem, the next action depends on the option 'require_changes'
914 - If 'require_changes' == -1, errors are ignored and the .changes
915 filename is returned.
916 - If 'require_changes' == 0, a warning is given and 'None' is returned.
917 - If 'require_changes' == 1, a fatal error is raised.
922 orig_filename = filename
923 if filename.endswith(".dak"):
924 filename = filename[:-4]+".changes"
926 if not filename.endswith(".changes"):
927 error = "invalid file type; not a changes file"
929 if not os.access(filename,os.R_OK):
930 if os.path.exists(filename):
931 error = "permission denied"
933 error = "file not found"
936 if require_changes == 1:
937 fubar("%s: %s." % (orig_filename, error))
938 elif require_changes == 0:
939 warn("Skipping %s - %s" % (orig_filename, error))
941 else: # We only care about the .dak file
946 ################################################################################
949 return (arch != "source" and arch != "all")
951 ################################################################################
def join_with_commas_and(list):
    """Render a sequence as an English enumeration, e.g.
    ["a", "b", "c"] -> "a, b and c"; an empty sequence -> "nothing"."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    all_but_last = ", ".join(list[:-1])
    return all_but_last + " and " + list[-1]
958 ################################################################################
963 (pkg, version, constraint) = atom
965 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
968 pp_deps.append(pp_dep)
969 return " |".join(pp_deps)
971 ################################################################################
976 ################################################################################
978 def parse_args(Options):
979 """ Handle -a, -c and -s arguments; returns them as SQL constraints """
983 for suite in split_args(Options["Suite"]):
984 suite_id = database.get_suite_id(suite)
986 warn("suite '%s' not recognised." % (suite))
988 suite_ids_list.append(suite_id)
990 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
992 fubar("No valid suite given.")
997 if Options["Component"]:
998 component_ids_list = []
999 for component in split_args(Options["Component"]):
1000 component_id = database.get_component_id(component)
1001 if component_id == -1:
1002 warn("component '%s' not recognised." % (component))
1004 component_ids_list.append(component_id)
1005 if component_ids_list:
1006 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
1008 fubar("No valid component given.")
1012 # Process architecture
1013 con_architectures = ""
1014 if Options["Architecture"]:
1017 for architecture in split_args(Options["Architecture"]):
1018 if architecture == "source":
1021 architecture_id = database.get_architecture_id(architecture)
1022 if architecture_id == -1:
1023 warn("architecture '%s' not recognised." % (architecture))
1025 arch_ids_list.append(architecture_id)
1027 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
1029 if not check_source:
1030 fubar("No valid architecture given.")
1034 return (con_suites, con_architectures, con_components, check_source)
1036 ################################################################################
1038 # Inspired(tm) by Bryn Keller's print_exc_plus (See
1039 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
1042 tb = sys.exc_info()[2]
1049 frame = frame.f_back
1051 traceback.print_exc()
1053 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
1054 frame.f_code.co_filename,
1056 for key, value in frame.f_locals.items():
1057 print "\t%20s = " % key,
1061 print "<unable to print>"
1063 ################################################################################
1065 def try_with_debug(function):
1073 ################################################################################
1075 def arch_compare_sw (a, b):
1077 Function for use in sorting lists of architectures.
1079 Sorts normally except that 'source' dominates all others.
1082 if a == "source" and b == "source":
1091 ################################################################################
def split_args (s, dwim=1):
    """Split command line arguments which can be separated by either commas
    or whitespace.  If dwim is set, it will complain about string ending
    in comma since this usually means someone did 'dak ls -a i386, m68k
    foo' or something and the inevitable confusion resulting from 'm68k'
    being treated as an argument is undesirable.
    """
    if "," not in s:
        # Whitespace-separated form.
        return s.split()
    if s.endswith(",") and dwim:
        fubar("split_args: found trailing comma, spurious space maybe?")
    return s.split(",")
1109 ################################################################################
def Dict(**kwargs):
    """Return the keyword arguments as a plain dictionary (legacy helper
    predating use of the dict(key=value) constructor in this codebase)."""
    # Renamed the catch-all parameter from 'dict' to 'kwargs': the old
    # name shadowed the builtin dict type.  Callers are unaffected,
    # since '**' collects keyword arguments regardless of its name.
    return kwargs
1113 ########################################
1115 def gpgv_get_status_output(cmd, status_read, status_write):
1117 Our very own version of commands.getouputstatus(), hacked to support
1121 cmd = ['/bin/sh', '-c', cmd]
1122 p2cread, p2cwrite = os.pipe()
1123 c2pread, c2pwrite = os.pipe()
1124 errout, errin = os.pipe()
1134 for i in range(3, 256):
1135 if i != status_write:
1141 os.execvp(cmd[0], cmd)
1147 os.dup2(c2pread, c2pwrite)
1148 os.dup2(errout, errin)
1150 output = status = ""
1152 i, o, e = select.select([c2pwrite, errin, status_read], [], [])
1155 r = os.read(fd, 8196)
1157 more_data.append(fd)
1158 if fd == c2pwrite or fd == errin:
1160 elif fd == status_read:
1163 fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
1165 pid, exit_status = os.waitpid(pid, 0)
1167 os.close(status_write)
1168 os.close(status_read)
1178 return output, status, exit_status
1180 ################################################################################
def process_gpgv_output(status):
    """Parse gpgv --status-fd output.

    @type status: string
    @param status: raw status-fd text produced by gpgv

    @rtype: tuple
    @return: ({keyword: argument-list}, internal-error-string); the error
             string is empty when every status line parsed cleanly.
    """
    keywords = {}
    internal_error = ""
    for line in status.split('\n'):
        if line == "":
            continue
        split = line.split()
        if len(split) < 2:
            internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
            continue
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
            continue
        args = split[2:]
        # gpgv may legitimately repeat these tokens; any other duplicate
        # keyword indicates malformed output.  (Replaced the deprecated
        # dict.has_key() with the 'in' operator.)
        if keyword in keywords and keyword not in ["NODATA", "SIGEXPIRED", "KEYEXPIRED"]:
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
            continue
        keywords[keyword] = args
    return (keywords, internal_error)
1207 ################################################################################
1209 def retrieve_key (filename, keyserver=None, keyring=None):
1211 Retrieve the key that signed 'filename' from 'keyserver' and
1212 add it to 'keyring'. Returns nothing on success, or an error message
1216 # Defaults for keyserver and keyring
1218 keyserver = Cnf["Dinstall::KeyServer"]
1220 keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
1222 # Ensure the filename contains no shell meta-characters or other badness
1223 if not re_taint_free.match(filename):
1224 return "%s: tainted filename" % (filename)
1226 # Invoke gpgv on the file
1227 status_read, status_write = os.pipe()
1228 cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1229 (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1231 # Process the status-fd output
1232 (keywords, internal_error) = process_gpgv_output(status)
1234 return internal_error
1236 if not keywords.has_key("NO_PUBKEY"):
1237 return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1239 fingerprint = keywords["NO_PUBKEY"][0]
1240 # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
1241 # it'll try to create a lockfile in /dev. A better solution might
1242 # be a tempfile or something.
1243 cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1244 % (Cnf["Dinstall::SigningKeyring"])
1245 cmd += " --keyring %s --keyserver %s --recv-key %s" \
1246 % (keyring, keyserver, fingerprint)
1247 (result, output) = commands.getstatusoutput(cmd)
1249 return "'%s' failed with exit code %s" % (cmd, result)
1253 ################################################################################
def gpg_keyring_args(keyrings=None):
    """Build a "--keyring <path>" command-line fragment for gpg/gpgv.

    Defaults to the configured Dinstall::GPGKeyring list when no
    keyrings are supplied.
    """
    if not keyrings:
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
    flags = []
    for keyring in keyrings:
        flags.append("--keyring %s" % keyring)
    return " ".join(flags)
1261 ################################################################################
# Verify a PGP signature with gpgv and vet its machine-readable status output.
# NOTE(review): the embedded original line numbers jump throughout this block,
# so many statements (guard conditions, returns, bindings of 'key'/'timestamp')
# are elided from this excerpt -- the gap comments below flag them.  Do not
# restructure this function from this view alone; confirm against full source.
1263 def check_signature (sig_filename, reject, data_filename="", keyrings=None, autofetch=None):
# Docstring body follows; its triple-quote delimiters fall on elided lines.
1265 Check the signature of a file and return the fingerprint if the
1266 signature is valid or 'None' if it's not. The first argument is the
1267 filename whose signature should be checked. The second argument is a
1268 reject function and is called when an error is found. The reject()
1269 function must allow for two arguments: the first is the error message,
1270 the second is an optional prefix string. It's possible for reject()
1271 to be called more than once during an invocation of check_signature().
1272 The third argument is optional and is the name of the files the
1273 detached signature applies to. The fourth argument is optional and is
1274 a *list* of keyrings to use. 'autofetch' can either be None, True or
1275 False. If None, the default behaviour specified in the config will be
1279 # Ensure the filename contains no shell meta-characters or other badness
1280 if not re_taint_free.match(sig_filename):
1281 reject("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
# (elided: presumably a 'return None' after the reject above -- TODO confirm)
1284 if data_filename and not re_taint_free.match(data_filename):
1285 reject("!!WARNING!! tainted data filename: '%s'." % (data_filename))
# (elided: the 'if not keyrings:' guard before this config fallback)
1289 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1291 # Autofetch the signing key if that's enabled
1292 if autofetch == None:
1293 autofetch = Cnf.get("Dinstall::KeyAutoFetch")
# (elided: the 'if autofetch:' guard around the key retrieval below)
1295 error_msg = retrieve_key(sig_filename)
1300 # Build the command line
# Pipe so that gpgv's --status-fd stream can be read separately from output.
1301 status_read, status_write = os.pipe()
1302 cmd = "gpgv --status-fd %s %s %s %s" % (
1303 status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)
1305 # Invoke gpgv on the file
1306 (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)
1308 # Process the status-fd output
1309 (keywords, internal_error) = process_gpgv_output(status)
1311 # If we failed to parse the status-fd output, let's just whine and bail now
# (elided: the 'if internal_error:' guard for the three rejects below)
1313 reject("internal error while performing signature check on %s." % (sig_filename))
1314 reject(internal_error, "")
1315 reject("Please report the above errors to the Archive maintainers by replying to this mail.", "")
1319 # Now check for obviously bad things in the processed output
1320 if keywords.has_key("KEYREVOKED"):
1321 reject("The key used to sign %s has been revoked." % (sig_filename))
1323 if keywords.has_key("BADSIG"):
1324 reject("bad signature on %s." % (sig_filename))
# ERRSIG without NO_PUBKEY means verification itself failed.
1326 if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
1327 reject("failed to check signature on %s." % (sig_filename))
1329 if keywords.has_key("NO_PUBKEY"):
1330 args = keywords["NO_PUBKEY"]
# (elided: binding of 'key' from args -- presumably key = args[0])
1333 reject("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
1335 if keywords.has_key("BADARMOR"):
1336 reject("ASCII armour of signature was corrupt in %s." % (sig_filename))
1338 if keywords.has_key("NODATA"):
1339 reject("no signature found in %s." % (sig_filename))
1341 if keywords.has_key("EXPKEYSIG"):
1342 args = keywords["EXPKEYSIG"]
# (elided: binding of 'key' from args)
1345 reject("Signature made by expired key 0x%s" % (key))
# An expired key is only fatal when gpgv did not also report GOODSIG.
1347 if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
1348 args = keywords["KEYEXPIRED"]
# (elided: binding of 'timestamp' from args and surrounding guards)
# gpg emits either an epoch timestamp or an ISO8601 string containing 'T'.
1352 if timestamp.count("T") == 0:
1354 expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
1356 expiredate = "unknown (%s)" % (timestamp)
1358 expiredate = timestamp
1359 reject("The key used to sign %s has expired on %s" % (sig_filename, expiredate))
1365 # Next check gpgv exited with a zero return code
# (elided: the 'if exit_status:' guard for the rejects below)
1367 reject("gpgv failed while checking %s." % (sig_filename))
1369 reject(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
1371 reject(prefix_multi_line_string(output, " [GPG output:] "), "")
1374 # Sanity check the good stuff we expect
1375 if not keywords.has_key("VALIDSIG"):
1376 reject("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
# (elided: an 'else:' and a length check on args guarding the lines below)
1379 args = keywords["VALIDSIG"]
1381 reject("internal error while checking signature on %s." % (sig_filename))
# VALIDSIG's first status argument is the signing key's fingerprint.
1384 fingerprint = args[0]
1385 if not keywords.has_key("GOODSIG"):
1386 reject("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
1388 if not keywords.has_key("SIG_ID"):
1389 reject("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))
1392 # Finally ensure there's not something we don't recognise
1393 known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
1394 SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
1395 NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")
1397 for keyword in keywords.keys():
1398 if not known_keywords.has_key(keyword):
1399 reject("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
1407 ################################################################################
# NOTE(review): embedded line numbers jump (1412->1415, 1417->1419, 1420->1422),
# so this excerpt elides lines: the cache-hit 'return addresses', the set
# initialisation, a result check, and the 'if m:' guard.  Confirm in full source.
1409 def gpg_get_key_addresses(fingerprint):
1410 """Retrieve email addresses from gpg key uids for a given fingerprint."""
# Serve repeated lookups from the module-level key_uid_email_cache.
1411 addresses = key_uid_email_cache.get(fingerprint)
1412 if addresses != None:
# List the key's uids via gpg; gpg_keyring_args() supplies the keyrings.
1415 cmd = "gpg --no-default-keyring %s --fingerprint %s" \
1416 % (gpg_keyring_args(), fingerprint)
1417 (result, output) = commands.getstatusoutput(cmd)
# Scan each output line for a uid containing an email address.
1419 for l in output.split('\n'):
1420 m = re_gpg_uid.match(l)
1422 addresses.add(m.group(1))
# Remember the result so subsequent lookups skip the gpg invocation.
1423 key_uid_email_cache[fingerprint] = addresses
1426 ################################################################################
1428 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
# Word-wrap 'paragraph' to at most 'max_length' columns, prefixing each
# continuation line with 'prefix'.
# NOTE(review): heavily elided excerpt -- the loop header ('for word in
# words:'), the initialisations of the accumulators ('s', 'line'), several
# branches, and the final return are all on elided lines.  Do not edit this
# function from this view alone; confirm against the full source.
1430 def wrap(paragraph, max_length, prefix=""):
1434 words = paragraph.split()
1437 word_size = len(word)
# An over-long word is flushed onto a line of its own.
1438 if word_size > max_length:
1440 s += line + '\n' + prefix
1441 s += word + '\n' + prefix
# +1 accounts for the joining space between 'line' and 'word'.
1444 new_length = len(line) + word_size + 1
1445 if new_length > max_length:
1446 s += line + '\n' + prefix
1459 ################################################################################
# NOTE(review): the docstring's triple-quote delimiters fall on lines elided
# from this excerpt (1462, 1464-1465).
1461 def clean_symlink (src, dest, root):
1463 Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
# Strip the leading 'root' component from both paths (first occurrence only).
1466 src = src.replace(root, '', 1)
1467 dest = dest.replace(root, '', 1)
1468 dest = os.path.dirname(dest)
# One '../' per directory level remaining in the link's location.
1469 new_src = '../' * len(dest.split('/'))
1470 return new_src + src
1472 ################################################################################
# NOTE(review): the docstring's triple-quote delimiters fall on elided lines
# (1475, 1482-1483) of this excerpt.
1474 def temp_filename(directory=None, prefix="dak", suffix=""):
1476 Return a secure and unique filename by pre-creating it.
1477 If 'directory' is non-null, it will be the directory the file is pre-created in.
1478 If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
1479 If 'suffix' is non-null, the filename will end with it.
1481 Returns a pair (fd, name).
# Delegates to tempfile.mkstemp; the caller is responsible for closing the
# returned fd and removing the file.
1484 return tempfile.mkstemp(suffix, prefix, directory)
1486 ################################################################################
# NOTE(review): the docstring's triple-quote delimiters fall on elided lines
# (1489, 1496-1497) of this excerpt.
1488 def temp_dirname(parent=None, prefix="dak", suffix=""):
1490 Return a secure and unique directory by pre-creating it.
1491 If 'parent' is non-null, it will be the directory the directory is pre-created in.
1492 If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
1493 If 'suffix' is non-null, the filename will end with it.
1495 Returns a pathname to the new directory
# Delegates to tempfile.mkdtemp; the caller is responsible for removing the
# directory when done.
1498 return tempfile.mkdtemp(suffix, prefix, parent)
1500 ################################################################################
# NOTE(review): embedded line numbers jump (1503->1505, 1506->1509), eliding
# lines -- presumably 'global alias_cache' and the initialisation of
# alias_cache to a set when aliasfn is usable.  Confirm in full source.
1502 def is_email_alias(email):
1503 """ checks if the user part of the email is listed in the alias file """
# Lazily populate the module-level alias cache on first use.
1505 if alias_cache == None:
1506 aliasfn = which_alias_file()
# Each alias-file line is "name: target"; cache only the name part.
1509 for l in open(aliasfn):
1510 alias_cache.add(l.split(':')[0])
# Compare only the local part (before '@') of the address.
1511 uid = email.split('@')[0]
1512 return uid in alias_cache
1514 ################################################################################
# NOTE(review): embedded line numbers jump, eliding the docstring delimiters
# and -- given the indentation of the fubar() line -- a 'try:' around the
# listdir and the 'except OSError' header before fubar.  Confirm in full source.
1516 def get_changes_files(dir):
1518 Takes a directory and lists all .changes files in it (as well as chdir'ing
1519 to the directory; this is due to broken behaviour on the part of p-u/p-a
1520 when you're not in the right place)
1522 Returns a list of filenames
1525 # Much of the rest of p-u/p-a depends on being in the right place
# Collect every *.changes entry in the directory.
1527 changes_files = [x for x in os.listdir(dir) if x.endswith('.changes')]
# Fatal error (fubar) if the directory could not be read.
1529 fubar("Failed to read list from directory %s (%s)" % (dir, e))
1531 return changes_files
1533 ################################################################################
# Load the dak configuration into the global 'Cnf' apt_pkg Configuration.
# NOTE(review): lines 1534-1536 are elided from this excerpt, so the enclosing
# context of these statements (module level vs. a function) is not visible
# here -- confirm against the full source.
1537 Cnf = apt_pkg.newConfiguration()
# Start from the default config (/etc/dak/dak.conf, see default_config above).
1538 apt_pkg.ReadConfigFileISC(Cnf,default_config)
# Overlay the host-specific config when it differs from the default.
1540 if which_conf_file() != default_config:
1541 apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1543 ###############################################################################