2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
42 import email as modemail
43 from dak_exceptions import *
44 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
45 re_multi_line_field, re_srchasver, re_verwithext, \
46 re_parse_maintainer, re_taint_free, re_gpg_uid, re_re_mark
48 ################################################################################
50 default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
51 default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used
53 alias_cache = None #: Cache for email alias checks
54 key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
56 # (hashname, function, earliest_changes_version)
57 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
58 ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
60 ################################################################################
63 """ Escape html chars """
64 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
66 ################################################################################
68 def open_file(filename, mode='r'):
70 Open C{file}, return fileobject.
72 @type filename: string
73 @param filename: path/filename to open
76 @param mode: open mode
79 @return: open fileobject
81 @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
85 f = open(filename, mode)
87 raise CantOpenError, filename
90 ################################################################################
92 def our_raw_input(prompt=""):
94 sys.stdout.write(prompt)
100 sys.stderr.write("\nUser interrupt (^D).\n")
103 ################################################################################
105 def extract_component_from_section(section):
108 if section.find('/') != -1:
109 component = section.split('/')[0]
111 # Expand default component
113 if Cnf.has_key("Component::%s" % section):
118 return (section, component)
120 ################################################################################
122 def parse_deb822(contents, signing_rules=0):
126 # Split the lines in the input, keeping the linebreaks.
127 lines = contents.splitlines(True)
130 raise ParseChangesError, "[Empty changes file]"
132 # Reindex by line number so we can easily verify the format of
138 indexed_lines[index] = line[:-1]
142 num_of_lines = len(indexed_lines.keys())
145 while index < num_of_lines:
147 line = indexed_lines[index]
149 if signing_rules == 1:
151 if index > num_of_lines:
152 raise InvalidDscError, index
153 line = indexed_lines[index]
154 if not line.startswith("-----BEGIN PGP SIGNATURE"):
155 raise InvalidDscError, index
160 if line.startswith("-----BEGIN PGP SIGNATURE"):
162 if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
164 if signing_rules == 1:
165 while index < num_of_lines and line != "":
167 line = indexed_lines[index]
169 # If we're not inside the signed data, don't process anything
170 if signing_rules >= 0 and not inside_signature:
172 slf = re_single_line_field.match(line)
174 field = slf.groups()[0].lower()
175 changes[field] = slf.groups()[1]
179 changes[field] += '\n'
181 mlf = re_multi_line_field.match(line)
184 raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
185 if first == 1 and changes[field] != "":
186 changes[field] += '\n'
188 changes[field] += mlf.groups()[0] + '\n'
192 if signing_rules == 1 and inside_signature:
193 raise InvalidDscError, index
195 changes["filecontents"] = "".join(lines)
197 if changes.has_key("source"):
198 # Strip the source version in brackets from the source field,
199 # put it in the "source-version" field instead.
200 srcver = re_srchasver.search(changes["source"])
202 changes["source"] = srcver.group(1)
203 changes["source-version"] = srcver.group(2)
206 raise ParseChangesError, error
210 ################################################################################
212 def parse_changes(filename, signing_rules=0):
214 Parses a changes file and returns a dictionary where each field is a
215 key. The mandatory first argument is the filename of the .changes
218 signing_rules is an optional argument:
220 - If signing_rules == -1, no signature is required.
221 - If signing_rules == 0 (the default), a signature is required.
222 - If signing_rules == 1, it turns on the same strict format checking
225 The rules for (signing_rules == 1)-mode are:
227 - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
228 followed by any PGP header data and must end with a blank line.
230 - The data section must end with a blank line and must be followed by
231 "-----BEGIN PGP SIGNATURE-----".
234 changes_in = open_file(filename)
235 content = changes_in.read()
238 unicode(content, 'utf-8')
240 raise ChangesUnicodeError, "Changes file not proper utf-8"
241 return parse_deb822(content, signing_rules)
243 ################################################################################
def hash_key(hashname):
    """
    Return the key under which a hash value is stored in a files dict.

    @type hashname: string
    @param hashname: name of the hash (e.g. "sha1", "sha256", "md5")

    @rtype: string
    @return: the hash name with "sum" appended (e.g. "sha1sum")
    """
    # Entries in the per-file dicts are keyed "<hash>sum" (md5sum, sha1sum, ...).
    return '%ssum' % hashname
248 ################################################################################
250 def create_hash(where, files, hashname, hashfunc):
252 create_hash extends the passed files dict with the given hash by
253 iterating over all files on disk and passing them to the hashing
258 for f in files.keys():
260 file_handle = open_file(f)
261 except CantOpenError:
262 rejmsg.append("Could not open file %s for checksumming" % (f))
265 files[f][hash_key(hashname)] = hashfunc(file_handle)
270 ################################################################################
272 def check_hash(where, files, hashname, hashfunc):
274 check_hash checks the given hash in the files dict against the actual
275 files on disk. The hash values need to be present consistently in
276 all file entries. It does not modify its input in any way.
280 for f in files.keys():
284 file_handle = open_file(f)
286 # Check for the hash entry, to not trigger a KeyError.
287 if not files[f].has_key(hash_key(hashname)):
288 rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
292 # Actually check the hash for correctness.
293 if hashfunc(file_handle) != files[f][hash_key(hashname)]:
294 rejmsg.append("%s: %s check failed in %s" % (f, hashname,
296 except CantOpenError:
297 # TODO: This happens when the file is in the pool.
298 # warn("Cannot open file %s" % f)
305 ################################################################################
307 def check_size(where, files):
309 check_size checks the file sizes in the passed files dict against the
314 for f in files.keys():
319 # TODO: This happens when the file is in the pool.
323 actual_size = entry[stat.ST_SIZE]
324 size = int(files[f]["size"])
325 if size != actual_size:
326 rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
327 % (f, actual_size, size, where))
330 ################################################################################
332 def check_hash_fields(what, manifest):
334 check_hash_fields ensures that there are no checksum fields in the
335 given dict that we do not know about.
339 hashes = map(lambda x: x[0], known_hashes)
340 for field in manifest:
341 if field.startswith("checksums-"):
342 hashname = field.split("-",1)[1]
343 if hashname not in hashes:
344 rejmsg.append("Unsupported checksum field for %s "\
345 "in %s" % (hashname, what))
348 ################################################################################
350 def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
351 if format >= version:
352 # The version should contain the specified hash.
355 # Import hashes from the changes
356 rejmsg = parse_checksums(".changes", files, changes, hashname)
360 # We need to calculate the hash because it can't possibly
363 return func(".changes", files, hashname, hashfunc)
365 # We could add the orig which might be in the pool to the files dict to
366 # access the checksums easily.
368 def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
370 ensure_dsc_hashes' task is to ensure that each and every *present* hash
371 in the dsc is correct, i.e. identical to the changes file and if necessary
372 the pool. The latter task is delegated to check_hash.
376 if not dsc.has_key('Checksums-%s' % (hashname,)):
378 # Import hashes from the dsc
379 parse_checksums(".dsc", dsc_files, dsc, hashname)
381 rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
384 ################################################################################
386 def ensure_hashes(changes, dsc, files, dsc_files):
389 # Make sure we recognise the format of the Files: field in the .changes
390 format = changes.get("format", "0.0").split(".", 1)
392 format = int(format[0]), int(format[1])
394 format = int(float(format[0])), 0
396 # We need to deal with the original changes blob, as the fields we need
397 # might not be in the changes dict serialised into the .dak anymore.
398 orig_changes = parse_deb822(changes['filecontents'])
400 # Copy the checksums over to the current changes dict. This will keep
401 # the existing modifications to it intact.
402 for field in orig_changes:
403 if field.startswith('checksums-'):
404 changes[field] = orig_changes[field]
406 # Check for unsupported hashes
407 rejmsg.extend(check_hash_fields(".changes", changes))
408 rejmsg.extend(check_hash_fields(".dsc", dsc))
410 # We have to calculate the hash if we have an earlier changes version than
411 # the hash appears in rather than require it exist in the changes file
412 for hashname, hashfunc, version in known_hashes:
413 rejmsg.extend(_ensure_changes_hash(changes, format, version, files,
415 if "source" in changes["architecture"]:
416 rejmsg.extend(_ensure_dsc_hash(dsc, dsc_files, hashname,
421 def parse_checksums(where, files, manifest, hashname):
423 field = 'checksums-%s' % hashname
424 if not field in manifest:
426 for line in manifest[field].split('\n'):
429 checksum, size, checkfile = line.strip().split(' ')
430 if not files.has_key(checkfile):
431 # TODO: check for the file's entry in the original files dict, not
432 # the one modified by (auto)byhand and other weird stuff
433 # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
434 # (file, hashname, where))
436 if not files[checkfile]["size"] == size:
437 rejmsg.append("%s: size differs for files and checksums-%s entry "\
438 "in %s" % (checkfile, hashname, where))
440 files[checkfile][hash_key(hashname)] = checksum
441 for f in files.keys():
442 if not files[f].has_key(hash_key(hashname)):
443 rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
447 ################################################################################
449 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
451 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
454 # Make sure we have a Files: field to parse...
455 if not changes.has_key(field):
456 raise NoFilesFieldError
458 # Make sure we recognise the format of the Files: field
459 format = re_verwithext.search(changes.get("format", "0.0"))
461 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
463 format = format.groups()
464 if format[1] == None:
465 format = int(float(format[0])), 0, format[2]
467 format = int(format[0]), int(format[1]), format[2]
468 if format[2] == None:
472 # format = (1,0) are the only formats we currently accept,
473 # format = (0,0) are missing format headers of which we still
474 # have some in the archive.
475 if format != (1,0) and format != (0,0):
476 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
478 if (format < (1,5) or format > (1,8)):
479 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
480 if field != "files" and format < (1,8):
481 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
483 includes_section = (not is_a_dsc) and field == "files"
485 # Parse each entry/line:
486 for i in changes[field].split('\n'):
490 section = priority = ""
493 (md5, size, section, priority, name) = s
495 (md5, size, name) = s
497 raise ParseChangesError, i
504 (section, component) = extract_component_from_section(section)
506 files[name] = Dict(size=size, section=section,
507 priority=priority, component=component)
508 files[name][hashname] = md5
512 ################################################################################
514 def force_to_utf8(s):
516 Forces a string to UTF-8. If the string isn't already UTF-8,
517 it's assumed to be ISO-8859-1.
523 latin1_s = unicode(s,'iso8859-1')
524 return latin1_s.encode('utf-8')
526 def rfc2047_encode(s):
528 Encodes a (header) string per RFC2047 if necessary. If the
529 string is neither ASCII nor UTF-8, it's assumed to be ISO-8859-1.
532 codecs.lookup('ascii')[1](s)
537 codecs.lookup('utf-8')[1](s)
538 h = email.Header.Header(s, 'utf-8', 998)
541 h = email.Header.Header(s, 'iso-8859-1', 998)
544 ################################################################################
546 # <Culus> 'The standard sucks, but my tool is supposed to interoperate
547 # with it. I know - I'll fix the suckage and make things
550 def fix_maintainer (maintainer):
552 Parses a Maintainer or Changed-By field and returns:
553 1. an RFC822 compatible version,
554 2. an RFC2047 compatible version,
558 The name is forced to UTF-8 for both 1. and 3.. If the name field
559 contains '.' or ',' (as allowed by Debian policy), 1. and 2. are
560 switched to 'email (name)' format.
563 maintainer = maintainer.strip()
565 return ('', '', '', '')
567 if maintainer.find("<") == -1:
570 elif (maintainer[0] == "<" and maintainer[-1:] == ">"):
571 email = maintainer[1:-1]
574 m = re_parse_maintainer.match(maintainer)
576 raise ParseMaintError, "Doesn't parse as a valid Maintainer field."
580 # Get an RFC2047 compliant version of the name
581 rfc2047_name = rfc2047_encode(name)
583 # Force the name to be UTF-8
584 name = force_to_utf8(name)
586 if name.find(',') != -1 or name.find('.') != -1:
587 rfc822_maint = "%s (%s)" % (email, name)
588 rfc2047_maint = "%s (%s)" % (email, rfc2047_name)
590 rfc822_maint = "%s <%s>" % (name, email)
591 rfc2047_maint = "%s <%s>" % (rfc2047_name, email)
593 if email.find("@") == -1 and email.find("buildd_") != 0:
594 raise ParseMaintError, "No @ found in email address part."
596 return (rfc822_maint, rfc2047_maint, name, email)
598 ################################################################################
600 def send_mail (message, filename=""):
601 """sendmail wrapper, takes _either_ a message string or a file as arguments"""
603 # If we've been passed a string dump it into a temporary file
605 (fd, filename) = tempfile.mkstemp()
606 os.write (fd, message)
609 if Cnf.has_key("Dinstall::MailWhiteList") and \
610 Cnf["Dinstall::MailWhiteList"] != "":
611 message_in = open_file(filename)
612 message_raw = modemail.message_from_file(message_in)
616 whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
618 for line in whitelist_in:
619 if re_re_mark.match(line):
620 whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
622 whitelist.append(re.compile(re.escape(line.strip())))
627 fields = ["To", "Bcc", "Cc"]
630 value = message_raw.get(field, None)
633 for item in value.split(","):
634 (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
640 if not mail_whitelisted:
641 print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])
645 # Doesn't have any mail in whitelist so remove the header
647 del message_raw[field]
649 message_raw.replace_header(field, string.join(match, ", "))
651 # Change message fields in order if we don't have a To header
652 if not message_raw.has_key("To"):
655 if message_raw.has_key(field):
656 message_raw[fields[-1]] = message_raw[field]
657 del message_raw[field]
660 # Clean up any temporary files
661 # and return, as we removed all recipients.
663 os.unlink (filename);
666 fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
667 os.write (fd, message_raw.as_string(True));
671 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
673 raise SendmailFailedError, output
675 # Clean up any temporary files
679 ################################################################################
681 def poolify (source, component):
684 if source[:3] == "lib":
685 return component + source[:4] + '/' + source + '/'
687 return component + source[:1] + '/' + source + '/'
689 ################################################################################
691 def move (src, dest, overwrite = 0, perms = 0664):
692 if os.path.exists(dest) and os.path.isdir(dest):
695 dest_dir = os.path.dirname(dest)
696 if not os.path.exists(dest_dir):
697 umask = os.umask(00000)
698 os.makedirs(dest_dir, 02775)
700 #print "Moving %s to %s..." % (src, dest)
701 if os.path.exists(dest) and os.path.isdir(dest):
702 dest += '/' + os.path.basename(src)
703 # Don't overwrite unless forced to
704 if os.path.exists(dest):
706 fubar("Can't move %s to %s - file already exists." % (src, dest))
708 if not os.access(dest, os.W_OK):
709 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
710 shutil.copy2(src, dest)
711 os.chmod(dest, perms)
714 def copy (src, dest, overwrite = 0, perms = 0664):
715 if os.path.exists(dest) and os.path.isdir(dest):
718 dest_dir = os.path.dirname(dest)
719 if not os.path.exists(dest_dir):
720 umask = os.umask(00000)
721 os.makedirs(dest_dir, 02775)
723 #print "Copying %s to %s..." % (src, dest)
724 if os.path.exists(dest) and os.path.isdir(dest):
725 dest += '/' + os.path.basename(src)
726 # Don't overwrite unless forced to
727 if os.path.exists(dest):
729 raise FileExistsError
731 if not os.access(dest, os.W_OK):
732 raise CantOverwriteError
733 shutil.copy2(src, dest)
734 os.chmod(dest, perms)
736 ################################################################################
739 res = socket.gethostbyaddr(socket.gethostname())
740 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
741 if database_hostname:
742 return database_hostname
746 def which_conf_file ():
747 res = socket.gethostbyaddr(socket.gethostname())
748 # In case we allow local config files per user, try if one exists
749 if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
750 homedir = os.getenv("HOME")
751 confpath = os.path.join(homedir, "/etc/dak.conf")
752 if os.path.exists(confpath):
755 # We are still in here, so there is no local config file or we do
756 # not allow local files. Do the normal stuff.
757 if Cnf.get("Config::" + res[0] + "::DakConfig"):
758 return Cnf["Config::" + res[0] + "::DakConfig"]
760 return default_config
762 def which_apt_conf_file ():
763 res = socket.gethostbyaddr(socket.gethostname())
764 if Cnf.get("Config::" + res[0] + "::AptConfig"):
765 return Cnf["Config::" + res[0] + "::AptConfig"]
767 return default_apt_config
769 def which_alias_file():
770 hostname = socket.gethostbyaddr(socket.gethostname())[0]
771 aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
772 if os.path.exists(aliasfn):
777 ################################################################################
779 # Escape characters which have meaning to SQL's regex comparison operator ('~')
780 # (woefully incomplete)
783 s = s.replace('+', '\\\\+')
784 s = s.replace('.', '\\\\.')
787 ################################################################################
789 def TemplateSubst(map, filename):
790 """ Perform a substition of template """
791 templatefile = open_file(filename)
792 template = templatefile.read()
794 template = template.replace(x,map[x])
798 ################################################################################
800 def fubar(msg, exit_code=1):
801 sys.stderr.write("E: %s\n" % (msg))
805 sys.stderr.write("W: %s\n" % (msg))
807 ################################################################################
809 # Returns the user name with a laughable attempt at rfc822 conformancy
810 # (read: removing stray periods).
812 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
814 ################################################################################
824 return ("%d%s" % (c, t))
826 ################################################################################
828 def cc_fix_changes (changes):
829 o = changes.get("architecture", "")
831 del changes["architecture"]
832 changes["architecture"] = {}
834 changes["architecture"][j] = 1
836 def changes_compare (a, b):
837 """ Sort by source name, source version, 'have source', and then by filename """
839 a_changes = parse_changes(a)
844 b_changes = parse_changes(b)
848 cc_fix_changes (a_changes)
849 cc_fix_changes (b_changes)
851 # Sort by source name
852 a_source = a_changes.get("source")
853 b_source = b_changes.get("source")
854 q = cmp (a_source, b_source)
858 # Sort by source version
859 a_version = a_changes.get("version", "0")
860 b_version = b_changes.get("version", "0")
861 q = apt_pkg.VersionCompare(a_version, b_version)
865 # Sort by 'have source'
866 a_has_source = a_changes["architecture"].get("source")
867 b_has_source = b_changes["architecture"].get("source")
868 if a_has_source and not b_has_source:
870 elif b_has_source and not a_has_source:
873 # Fall back to sort by filename
876 ################################################################################
878 def find_next_free (dest, too_many=100):
881 while os.path.exists(dest) and extra < too_many:
882 dest = orig_dest + '.' + repr(extra)
884 if extra >= too_many:
885 raise NoFreeFilenameError
888 ################################################################################
890 def result_join (original, sep = '\t'):
892 for i in xrange(len(original)):
893 if original[i] == None:
894 resultlist.append("")
896 resultlist.append(original[i])
897 return sep.join(resultlist)
899 ################################################################################
901 def prefix_multi_line_string(str, prefix, include_blank_lines=0):
903 for line in str.split('\n'):
905 if line or include_blank_lines:
906 out += "%s%s\n" % (prefix, line)
907 # Strip trailing new line
912 ################################################################################
914 def validate_changes_file_arg(filename, require_changes=1):
916 'filename' is either a .changes or .dak file. If 'filename' is a
917 .dak file, it's changed to be the corresponding .changes file. The
918 function then checks if the .changes file a) exists and b) is
919 readable and returns the .changes filename if so. If there's a
920 problem, the next action depends on the option 'require_changes'
923 - If 'require_changes' == -1, errors are ignored and the .changes
924 filename is returned.
925 - If 'require_changes' == 0, a warning is given and 'None' is returned.
926 - If 'require_changes' == 1, a fatal error is raised.
931 orig_filename = filename
932 if filename.endswith(".dak"):
933 filename = filename[:-4]+".changes"
935 if not filename.endswith(".changes"):
936 error = "invalid file type; not a changes file"
938 if not os.access(filename,os.R_OK):
939 if os.path.exists(filename):
940 error = "permission denied"
942 error = "file not found"
945 if require_changes == 1:
946 fubar("%s: %s." % (orig_filename, error))
947 elif require_changes == 0:
948 warn("Skipping %s - %s" % (orig_filename, error))
950 else: # We only care about the .dak file
955 ################################################################################
958 return (arch != "source" and arch != "all")
960 ################################################################################
def join_with_commas_and(list):
    """
    Join a list of strings into natural English: "a, b and c".

    Returns "nothing" for an empty list and the sole element for a
    single-element list.

    NOTE(review): the parameter name shadows the ``list`` builtin; it is
    kept unchanged for backward compatibility with existing callers.

    @param list: list of strings to join

    @rtype: string
    @return: the elements joined with ", ", with " and " before the last
    """
    if len(list) == 0:
        return "nothing"
    if len(list) == 1:
        return list[0]
    # All but the last element are comma-separated; the last gets "and".
    return ", ".join(list[:-1]) + " and " + list[-1]
967 ################################################################################
972 (pkg, version, constraint) = atom
974 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
977 pp_deps.append(pp_dep)
978 return " |".join(pp_deps)
980 ################################################################################
985 ################################################################################
987 def parse_args(Options):
988 """ Handle -a, -c and -s arguments; returns them as SQL constraints """
992 for suite in split_args(Options["Suite"]):
993 suite_id = database.get_suite_id(suite)
995 warn("suite '%s' not recognised." % (suite))
997 suite_ids_list.append(suite_id)
999 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
1001 fubar("No valid suite given.")
1006 if Options["Component"]:
1007 component_ids_list = []
1008 for component in split_args(Options["Component"]):
1009 component_id = database.get_component_id(component)
1010 if component_id == -1:
1011 warn("component '%s' not recognised." % (component))
1013 component_ids_list.append(component_id)
1014 if component_ids_list:
1015 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
1017 fubar("No valid component given.")
1021 # Process architecture
1022 con_architectures = ""
1023 if Options["Architecture"]:
1026 for architecture in split_args(Options["Architecture"]):
1027 if architecture == "source":
1030 architecture_id = database.get_architecture_id(architecture)
1031 if architecture_id == -1:
1032 warn("architecture '%s' not recognised." % (architecture))
1034 arch_ids_list.append(architecture_id)
1036 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
1038 if not check_source:
1039 fubar("No valid architecture given.")
1043 return (con_suites, con_architectures, con_components, check_source)
1045 ################################################################################
1047 # Inspired(tm) by Bryn Keller's print_exc_plus (See
1048 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
1051 tb = sys.exc_info()[2]
1058 frame = frame.f_back
1060 traceback.print_exc()
1062 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
1063 frame.f_code.co_filename,
1065 for key, value in frame.f_locals.items():
1066 print "\t%20s = " % key,
1070 print "<unable to print>"
1072 ################################################################################
1074 def try_with_debug(function):
1082 ################################################################################
1084 def arch_compare_sw (a, b):
1086 Function for use in sorting lists of architectures.
1088 Sorts normally except that 'source' dominates all others.
1091 if a == "source" and b == "source":
1100 ################################################################################
1102 def split_args (s, dwim=1):
1104 Split command line arguments which can be separated by either commas
1105 or whitespace. If dwim is set, it will complain about string ending
1106 in comma since this usually means someone did 'dak ls -a i386, m68k
1107 foo' or something and the inevitable confusion resulting from 'm68k'
1108 being treated as an argument is undesirable.
1111 if s.find(",") == -1:
1114 if s[-1:] == "," and dwim:
1115 fubar("split_args: found trailing comma, spurious space maybe?")
1118 ################################################################################
def Dict(**kwargs):
    """
    Return the keyword arguments as a plain dictionary.

    Convenience constructor, e.g. ``Dict(size=s, section=x)`` instead of a
    dict literal. (The keyword-collector was renamed from ``dict`` to
    ``kwargs`` so it no longer shadows the builtin; a ``**`` parameter
    name is never visible to callers, so the interface is unchanged.)
    """
    return kwargs
1122 ########################################
1124 def gpgv_get_status_output(cmd, status_read, status_write):
1126 Our very own version of commands.getouputstatus(), hacked to support
1130 cmd = ['/bin/sh', '-c', cmd]
1131 p2cread, p2cwrite = os.pipe()
1132 c2pread, c2pwrite = os.pipe()
1133 errout, errin = os.pipe()
1143 for i in range(3, 256):
1144 if i != status_write:
1150 os.execvp(cmd[0], cmd)
1156 os.dup2(c2pread, c2pwrite)
1157 os.dup2(errout, errin)
1159 output = status = ""
1161 i, o, e = select.select([c2pwrite, errin, status_read], [], [])
1164 r = os.read(fd, 8196)
1166 more_data.append(fd)
1167 if fd == c2pwrite or fd == errin:
1169 elif fd == status_read:
1172 fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
1174 pid, exit_status = os.waitpid(pid, 0)
1176 os.close(status_write)
1177 os.close(status_read)
1187 return output, status, exit_status
1189 ################################################################################
1191 def process_gpgv_output(status):
1192 # Process the status-fd output
1195 for line in status.split('\n'):
1199 split = line.split()
1201 internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
1203 (gnupg, keyword) = split[:2]
1204 if gnupg != "[GNUPG:]":
1205 internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
1208 if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
1209 internal_error += "found duplicate status token ('%s').\n" % (keyword)
1212 keywords[keyword] = args
1214 return (keywords, internal_error)
1216 ################################################################################
1218 def retrieve_key (filename, keyserver=None, keyring=None):
1220 Retrieve the key that signed 'filename' from 'keyserver' and
1221 add it to 'keyring'. Returns nothing on success, or an error message
1225 # Defaults for keyserver and keyring
1227 keyserver = Cnf["Dinstall::KeyServer"]
1229 keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
1231 # Ensure the filename contains no shell meta-characters or other badness
1232 if not re_taint_free.match(filename):
1233 return "%s: tainted filename" % (filename)
1235 # Invoke gpgv on the file
1236 status_read, status_write = os.pipe()
1237 cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1238 (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1240 # Process the status-fd output
1241 (keywords, internal_error) = process_gpgv_output(status)
1243 return internal_error
1245 if not keywords.has_key("NO_PUBKEY"):
1246 return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1248 fingerprint = keywords["NO_PUBKEY"][0]
1249 # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
1250 # it'll try to create a lockfile in /dev. A better solution might
1251 # be a tempfile or something.
1252 cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1253 % (Cnf["Dinstall::SigningKeyring"])
1254 cmd += " --keyring %s --keyserver %s --recv-key %s" \
1255 % (keyring, keyserver, fingerprint)
1256 (result, output) = commands.getstatusoutput(cmd)
1258 return "'%s' failed with exit code %s" % (cmd, result)
1262 ################################################################################
1264 def gpg_keyring_args(keyrings=None):
1266 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1268 return " ".join(["--keyring %s" % x for x in keyrings])
1270 ################################################################################
def check_signature (sig_filename, reject, data_filename="", keyrings=None, autofetch=None):
    """
    Check the signature of a file and return the fingerprint if the
    signature is valid or 'None' if it's not. The first argument is the
    filename whose signature should be checked. The second argument is a
    reject function and is called when an error is found. The reject()
    function must allow for two arguments: the first is the error message,
    the second is an optional prefix string. It's possible for reject()
    to be called more than once during an invocation of check_signature().
    The third argument is optional and is the name of the files the
    detached signature applies to. The fourth argument is optional and is
    a *list* of keyrings to use. 'autofetch' can either be None, True or
    False. If None, the default behaviour specified in the config will be
    used.
    """

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(sig_filename):
        reject("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))

    if data_filename and not re_taint_free.match(data_filename):
        reject("!!WARNING!! tainted data filename: '%s'." % (data_filename))

        # NOTE(review): an "if not keyrings:" guard appears to be missing
        # in this copy before the following default — confirm against VCS.
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    # Autofetch the signing key if that's enabled
    if autofetch == None:
        # Fall back to the site-wide autofetch policy from the config.
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
        # retrieve_key() returns an error message string on failure.
        error_msg = retrieve_key(sig_filename)

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
        # NOTE(review): the "if internal_error:" guard seems to be missing
        # from this copy of the file — confirm against VCS.
        reject("internal error while performing signature check on %s." % (sig_filename))
        reject(internal_error, "")
        reject("Please report the above errors to the Archive maintainers by replying to this mail.", "")

    # Now check for obviously bad things in the processed output
    if keywords.has_key("KEYREVOKED"):
        reject("The key used to sign %s has been revoked." % (sig_filename))
    if keywords.has_key("BADSIG"):
        reject("bad signature on %s." % (sig_filename))
    # ERRSIG without NO_PUBKEY: gpgv failed for some reason other than a
    # missing public key (which gets its own message below).
    if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
        reject("failed to check signature on %s." % (sig_filename))
    if keywords.has_key("NO_PUBKEY"):
        args = keywords["NO_PUBKEY"]
        reject("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if keywords.has_key("BADARMOR"):
        reject("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if keywords.has_key("NODATA"):
        reject("no signature found in %s." % (sig_filename))
    if keywords.has_key("EXPKEYSIG"):
        args = keywords["EXPKEYSIG"]
        reject("Signature made by expired key 0x%s" % (key))
    # KEYEXPIRED together with GOODSIG is a signature made before the key
    # expired; only reject when there is no GOODSIG.
    if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
        args = keywords["KEYEXPIRED"]
            # A timestamp containing no "T" is epoch seconds; format it as
            # an ISO date, otherwise use the string as supplied by gpgv.
            if timestamp.count("T") == 0:
                    expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
                    expiredate = "unknown (%s)" % (timestamp)
                expiredate = timestamp
        reject("The key used to sign %s has expired on %s" % (sig_filename, expiredate))

    # Next check gpgv exited with a zero return code
        # NOTE(review): the "if exit_status:" guard appears to be missing
        # from this copy of the file — confirm against VCS.
        reject("gpgv failed while checking %s." % (sig_filename))
            reject(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
            reject(prefix_multi_line_string(output, " [GPG output:] "), "")

    # Sanity check the good stuff we expect
    if not keywords.has_key("VALIDSIG"):
        reject("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
        args = keywords["VALIDSIG"]
            reject("internal error while checking signature on %s." % (sig_filename))
            # The first VALIDSIG argument is the signing key's fingerprint.
            fingerprint = args[0]
    if not keywords.has_key("GOODSIG"):
        reject("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if not keywords.has_key("SIG_ID"):
        reject("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

    # Finally ensure there's not something we don't recognise
    known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
                          NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")

    for keyword in keywords.keys():
        if not known_keywords.has_key(keyword):
            reject("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
1416 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """retrieve email addresses from gpg key uids for a given fingerprint"""
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses is not None:
        # Cache hit: don't shell out to gpg again for this fingerprint.
        return addresses
    # Cache miss: collect the addresses by parsing gpg's uid lines.
    addresses = set()
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
          % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    # Only trust the output if gpg exited successfully; otherwise return
    # (and cache) an empty set for this fingerprint.
    if result == 0:
        for l in output.split('\n'):
            m = re_gpg_uid.match(l)
            # Skip non-uid lines; the previous code called m.group() even
            # when the line did not match.
            if m:
                addresses.add(m.group(1))
    key_uid_email_cache[fingerprint] = addresses
    return addresses
1435 ################################################################################
1437 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """
    Word-wrap 'paragraph' so that no line exceeds 'max_length' characters,
    inserting 'prefix' after every newline. A word longer than 'max_length'
    is emitted on a line of its own. Returns the wrapped string.
    """
    line = ""
    s = ""
    have_started = 0
    words = paragraph.split()

    for word in words:
        word_size = len(word)
        if word_size > max_length:
            # Oversized word: flush any pending line, then put the word on
            # its own line (it can never fit within max_length).
            if have_started:
                s += line + '\n' + prefix
            s += word + '\n' + prefix
            line = ""
            have_started = 0
        else:
            if have_started:
                # +1 accounts for the separating space.
                new_length = len(line) + word_size + 1
                if new_length > max_length:
                    # Current line is full; flush it and start a new one.
                    s += line + '\n' + prefix
                    line = word
                else:
                    line += " " + word
            else:
                line = word
                have_started = 1

    # Flush the final pending line (the truncated version dropped this and
    # returned nothing).
    if have_started:
        s += line

    return s
1468 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
    Returns fixed 'src'
    """
    # Strip the archive root off both endpoints (first occurrence only).
    stripped_src = src.replace(root, '', 1)
    target_dir = os.path.dirname(dest.replace(root, '', 1))
    # Climb one level for every path component of the link's directory.
    hops = '../' * len(target_dir.split('/'))
    return hops + stripped_src
1481 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.
    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pair (fd, name).
    """
    # tempfile.mkstemp creates the file atomically with a unique name.
    (fd, pathname) = tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=directory)
    return (fd, pathname)
1495 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Return a secure and unique directory by pre-creating it.
    If 'parent' is non-null, it will be the directory the directory is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pathname to the new directory
    """
    # tempfile.mkdtemp creates the directory atomically, mode 0700.
    new_dir = tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=parent)
    return new_dir
1509 ################################################################################
def is_email_alias(email):
    """ checks if the user part of the email is listed in the alias file """
    # Declare the module-level cache; without this, the assignment below
    # would create a local and the None default would never be replaced.
    global alias_cache
    if alias_cache is None:
        # First call: build the cache of alias local-parts once.
        aliasfn = which_alias_file()
        alias_cache = set()
        if aliasfn:
            for l in open(aliasfn):
                # Alias lines look like "name: target"; keep the name part.
                alias_cache.add(l.split(':')[0])
    uid = email.split('@')[0]
    return uid in alias_cache
1523 ################################################################################
def get_changes_files(dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place)

    Returns a list of filenames
    """
    try:
        # Much of the rest of p-u/p-a depends on being in the right place
        os.chdir(dir)
        changes_files = [x for x in os.listdir(dir) if x.endswith('.changes')]
    except OSError as e:
        # fubar() prints the message and exits; 'e' was previously
        # referenced without ever being bound.
        fubar("Failed to read list from directory %s (%s)" % (dir, e))

    return changes_files
1542 ################################################################################
# Module initialisation: build the global apt_pkg configuration object
# 'Cnf' used throughout this module, loading the default dak config first.
Cnf = apt_pkg.newConfiguration()
apt_pkg.ReadConfigFileISC(Cnf,default_config)

# If a host-specific config file is in effect, layer it on top of the
# defaults read above.
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1552 ###############################################################################