2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
42 import email as modemail
43 from dak_exceptions import *
44 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
45 re_multi_line_field, re_srchasver, re_verwithext, \
46 re_parse_maintainer, re_taint_free, re_gpg_uid, re_re_mark
48 ################################################################################
# Path of the main dak configuration file; defines per-host properties.
default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
# Path of the apt configuration dak may use; not normally used.
default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used

alias_cache = None #: Cache for email alias checks
key_uid_email_cache = {} #: Cache for email addresses from gpg key uids

# Each entry: (hashname, function, earliest_changes_version) -- the version
# tuple is the first .changes "Format:" that carries this checksum field.
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
                ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
60 ################################################################################
63 """ Escape html chars """
64 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
66 ################################################################################
def open_file(filename, mode='r'):
    """
    Open C{filename}, return fileobject.

    @type filename: string
    @param filename: path/filename to open

    @type mode: string
    @param mode: open mode

    @rtype: fileobject
    @return: open fileobject

    @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
    """
    try:
        f = open(filename, mode)
    except IOError:
        # Re-raise as the project's own exception so callers can catch a
        # single, specific error type for "could not open".
        raise CantOpenError(filename)
    return f
90 ################################################################################
92 def our_raw_input(prompt=""):
94 sys.stdout.write(prompt)
100 sys.stderr.write("\nUser interrupt (^D).\n")
103 ################################################################################
105 def extract_component_from_section(section):
108 if section.find('/') != -1:
109 component = section.split('/')[0]
111 # Expand default component
113 if Cnf.has_key("Component::%s" % section):
118 return (section, component)
120 ################################################################################
def parse_deb822(contents, signing_rules=0):
    """
    Parse a deb822-style block (a .changes or .dsc body) into a dict
    mapping lower-cased field names to their values.

    NOTE(review): many lines of this function are elided in this view
    (variable initialisation, several if/else headers, continue/break
    statements and the final return); the comments below only annotate
    what is visible -- confirm against the full source.
    """
    # Split the lines in the input, keeping the linebreaks.
    lines = contents.splitlines(True)
    raise ParseChangesError, "[Empty changes file]"

    # Reindex by line number so we can easily verify the format of
    # .dsc files...
    indexed_lines[index] = line[:-1]

    num_of_lines = len(indexed_lines.keys())
    while index < num_of_lines:
        line = indexed_lines[index]
        # Strict (dpkg-source-like) mode: a blank line must be followed
        # directly by the PGP signature block.
        if signing_rules == 1:
            if index > num_of_lines:
                raise InvalidDscError, index
            line = indexed_lines[index]
            if not line.startswith("-----BEGIN PGP SIGNATURE"):
                raise InvalidDscError, index
        if line.startswith("-----BEGIN PGP SIGNATURE"):
        if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
            if signing_rules == 1:
                # Skip the PGP header data up to the blank line.
                while index < num_of_lines and line != "":
                    line = indexed_lines[index]
        # If we're not inside the signed data, don't process anything
        if signing_rules >= 0 and not inside_signature:
        # Single-line field: "Field: value".
        slf = re_single_line_field.match(line)
        field = slf.groups()[0].lower()
        changes[field] = slf.groups()[1]
        # A " ." continuation line marks a blank line in a multi-line field.
        changes[field] += '\n'
        mlf = re_multi_line_field.match(line)
        raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
        if first == 1 and changes[field] != "":
            changes[field] += '\n'
        changes[field] += mlf.groups()[0] + '\n'

    # In strict mode a signature block that never terminated is an error.
    if signing_rules == 1 and inside_signature:
        raise InvalidDscError, index

    # Keep the raw text around for later re-parsing / archiving.
    changes["filecontents"] = "".join(lines)

    if changes.has_key("source"):
        # Strip the source version in brackets from the source field,
        # put it in the "source-version" field instead.
        srcver = re_srchasver.search(changes["source"])
        changes["source"] = srcver.group(1)
        changes["source-version"] = srcver.group(2)

    raise ParseChangesError, error
210 ################################################################################
def parse_changes(filename, signing_rules=0):
    """
    Parses a changes file and returns a dictionary where each field is a
    key.  The mandatory first argument is the filename of the .changes
    file.

    signing_rules is an optional argument:

    - If signing_rules == -1, no signature is required.
    - If signing_rules == 0 (the default), a signature is required.
    - If signing_rules == 1, it turns on the same strict format checking
      as dpkg-source.

    The rules for (signing_rules == 1)-mode are:

    - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
      followed by any PGP header data and must end with a blank line.

    - The data section must end with a blank line and must be followed by
      "-----BEGIN PGP SIGNATURE-----".
    """
    changes_in = open_file(filename)
    content = changes_in.read()
    # Probe that the file decodes as UTF-8 before handing it to the parser.
    # NOTE(review): the try/except (UnicodeError) wrapping this probe is
    # elided in this view -- as written the raise would be unconditional.
    unicode(content, 'utf-8')
    raise ChangesUnicodeError, "Changes file not proper utf-8"
    return parse_deb822(content, signing_rules)
243 ################################################################################
def hash_key(hashname):
    """Return the files-dict key under which the given hash is stored."""
    return hashname + 'sum'
248 ################################################################################
250 def create_hash(where, files, hashname, hashfunc):
252 create_hash extends the passed files dict with the given hash by
253 iterating over all files on disk and passing them to the hashing
258 for f in files.keys():
260 file_handle = open_file(f)
261 except CantOpenError:
262 rejmsg.append("Could not open file %s for checksumming" % (f))
265 files[f][hash_key(hashname)] = hashfunc(file_handle)
270 ################################################################################
def check_hash(where, files, hashname, hashfunc):
    """
    check_hash checks the given hash in the files dict against the actual
    files on disk. The hash values need to be present consistently in
    all file entries. It does not modify its input in any way.
    """
    # NOTE(review): the rejmsg initialisation, the try: matching the
    # 'except CantOpenError' below, the closing arguments of both append
    # calls and the final return are elided from this view.
    for f in files.keys():
        file_handle = open_file(f)
        # Check for the hash entry, to not trigger a KeyError.
        if not files[f].has_key(hash_key(hashname)):
            rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
        # Actually check the hash for correctness.
        if hashfunc(file_handle) != files[f][hash_key(hashname)]:
            rejmsg.append("%s: %s check failed in %s" % (f, hashname,
        except CantOpenError:
            # TODO: This happens when the file is in the pool.
            # warn("Cannot open file %s" % f)
305 ################################################################################
def check_size(where, files):
    """
    check_size checks the file sizes in the passed files dict against the
    sizes of the files on disk.
    """
    # NOTE(review): the rejmsg initialisation, the os.stat() call that
    # assigns 'entry' (with its error handling for pool files) and the
    # final return are elided from this view.
    for f in files.keys():
        # TODO: This happens when the file is in the pool.
        actual_size = entry[stat.ST_SIZE]
        size = int(files[f]["size"])
        if size != actual_size:
            rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
                          % (f, actual_size, size, where))
330 ################################################################################
def check_hash_fields(what, manifest):
    """
    check_hash_fields ensures that there are no checksum fields in the
    given dict that we do not know about (i.e. not listed in known_hashes).

    @param what: label for rejection messages (e.g. ".changes")
    @param manifest: dict of parsed fields to inspect
    @rtype: list
    @return: rejection messages; empty if every checksums-* field is known
    """
    rejmsg = []
    # List comprehension instead of map(lambda ...) -- same result, clearer.
    hashes = [h[0] for h in known_hashes]
    for field in manifest:
        if field.startswith("checksums-"):
            hashname = field.split("-", 1)[1]
            if hashname not in hashes:
                rejmsg.append("Unsupported checksum field for %s "
                              "in %s" % (hashname, what))
    return rejmsg
348 ################################################################################
350 def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
351 if format >= version:
352 # The version should contain the specified hash.
355 # Import hashes from the changes
356 rejmsg = parse_checksums(".changes", files, changes, hashname)
360 # We need to calculate the hash because it can't possibly
363 return func(".changes", files, hashname, hashfunc)
365 # We could add the orig which might be in the pool to the files dict to
366 # access the checksums easily.
368 def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
370 ensure_dsc_hashes' task is to ensure that each and every *present* hash
371 in the dsc is correct, i.e. identical to the changes file and if necessary
372 the pool. The latter task is delegated to check_hash.
376 if not dsc.has_key('Checksums-%s' % (hashname,)):
378 # Import hashes from the dsc
379 parse_checksums(".dsc", dsc_files, dsc, hashname)
381 rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
384 ################################################################################
386 def ensure_hashes(changes, dsc, files, dsc_files):
389 # Make sure we recognise the format of the Files: field in the .changes
390 format = changes.get("format", "0.0").split(".", 1)
392 format = int(format[0]), int(format[1])
394 format = int(float(format[0])), 0
396 # We need to deal with the original changes blob, as the fields we need
397 # might not be in the changes dict serialised into the .dak anymore.
398 orig_changes = parse_deb822(changes['filecontents'])
400 # Copy the checksums over to the current changes dict. This will keep
401 # the existing modifications to it intact.
402 for field in orig_changes:
403 if field.startswith('checksums-'):
404 changes[field] = orig_changes[field]
406 # Check for unsupported hashes
407 rejmsg.extend(check_hash_fields(".changes", changes))
408 rejmsg.extend(check_hash_fields(".dsc", dsc))
410 # We have to calculate the hash if we have an earlier changes version than
411 # the hash appears in rather than require it exist in the changes file
412 for hashname, hashfunc, version in known_hashes:
413 rejmsg.extend(_ensure_changes_hash(changes, format, version, files,
415 if "source" in changes["architecture"]:
416 rejmsg.extend(_ensure_dsc_hash(dsc, dsc_files, hashname,
421 def parse_checksums(where, files, manifest, hashname):
423 field = 'checksums-%s' % hashname
424 if not field in manifest:
426 for line in manifest[field].split('\n'):
429 checksum, size, checkfile = line.strip().split(' ')
430 if not files.has_key(checkfile):
431 # TODO: check for the file's entry in the original files dict, not
432 # the one modified by (auto)byhand and other weird stuff
433 # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
434 # (file, hashname, where))
436 if not files[checkfile]["size"] == size:
437 rejmsg.append("%s: size differs for files and checksums-%s entry "\
438 "in %s" % (checkfile, hashname, where))
440 files[checkfile][hash_key(hashname)] = checksum
441 for f in files.keys():
442 if not files[f].has_key(hash_key(hashname)):
443 rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
447 ################################################################################
449 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
451 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
454 # Make sure we have a Files: field to parse...
455 if not changes.has_key(field):
456 raise NoFilesFieldError
458 # Make sure we recognise the format of the Files: field
459 format = re_verwithext.search(changes.get("format", "0.0"))
461 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
463 format = format.groups()
464 if format[1] == None:
465 format = int(float(format[0])), 0, format[2]
467 format = int(format[0]), int(format[1]), format[2]
468 if format[2] == None:
472 # format = (1,0) are the only formats we currently accept,
473 # format = (0,0) are missing format headers of which we still
474 # have some in the archive.
475 if format != (1,0) and format != (0,0):
476 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
478 if (format < (1,5) or format > (1,8)):
479 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
480 if field != "files" and format < (1,8):
481 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
483 includes_section = (not is_a_dsc) and field == "files"
485 # Parse each entry/line:
486 for i in changes[field].split('\n'):
490 section = priority = ""
493 (md5, size, section, priority, name) = s
495 (md5, size, name) = s
497 raise ParseChangesError, i
504 (section, component) = extract_component_from_section(section)
506 files[name] = Dict(size=size, section=section,
507 priority=priority, component=component)
508 files[name][hashname] = md5
512 ################################################################################
514 def force_to_utf8(s):
516 Forces a string to UTF-8. If the string isn't already UTF-8,
517 it's assumed to be ISO-8859-1.
523 latin1_s = unicode(s,'iso8859-1')
524 return latin1_s.encode('utf-8')
526 def rfc2047_encode(s):
528 Encodes a (header) string per RFC2047 if necessary. If the
529 string is neither ASCII nor UTF-8, it's assumed to be ISO-8859-1.
532 codecs.lookup('ascii')[1](s)
537 codecs.lookup('utf-8')[1](s)
538 h = email.Header.Header(s, 'utf-8', 998)
541 h = email.Header.Header(s, 'iso-8859-1', 998)
544 ################################################################################
546 # <Culus> 'The standard sucks, but my tool is supposed to interoperate
547 # with it. I know - I'll fix the suckage and make things
def fix_maintainer (maintainer):
    """
    Parses a Maintainer or Changed-By field and returns:
      1. an RFC822 compatible version,
      2. an RFC2047 compatible version,
      3. the name,
      4. the email.

    The name is forced to UTF-8 for both 1. and 3.. If the name field
    contains '.' or ',' (as allowed by Debian policy), 1. and 2. are
    switched to 'email (name)' format.
    """
    # NOTE(review): several lines are elided in this view (the empty-input
    # guard before the 4-tuple of empty strings, the name/email
    # assignments in the first two branches, the extraction of name/email
    # from the regex match, and the else: headers) -- annotations below
    # cover only what is visible.
    maintainer = maintainer.strip()
    return ('', '', '', '')

    if maintainer.find("<") == -1:
    elif (maintainer[0] == "<" and maintainer[-1:] == ">"):
        email = maintainer[1:-1]
        m = re_parse_maintainer.match(maintainer)
        raise ParseMaintError, "Doesn't parse as a valid Maintainer field."

    # Get an RFC2047 compliant version of the name
    rfc2047_name = rfc2047_encode(name)

    # Force the name to be UTF-8
    name = force_to_utf8(name)

    # Policy: names containing '.'/',' must be rendered 'email (name)'.
    if name.find(',') != -1 or name.find('.') != -1:
        rfc822_maint = "%s (%s)" % (email, name)
        rfc2047_maint = "%s (%s)" % (email, rfc2047_name)
        rfc822_maint = "%s <%s>" % (name, email)
        rfc2047_maint = "%s <%s>" % (rfc2047_name, email)

    # buildd_* pseudo-addresses are exempt from the '@' sanity check.
    if email.find("@") == -1 and email.find("buildd_") != 0:
        raise ParseMaintError, "No @ found in email address part."

    return (rfc822_maint, rfc2047_maint, name, email)
598 ################################################################################
600 def send_mail (message, filename=""):
601 """sendmail wrapper, takes _either_ a message string or a file as arguments"""
603 # If we've been passed a string dump it into a temporary file
605 (fd, filename) = tempfile.mkstemp()
606 os.write (fd, message)
609 if Cnf.has_key("Dinstall::MailWhiteList") and \
610 Cnf["Dinstall::MailWhiteList"] != "":
611 message_in = open_file(filename)
612 message_raw = modemail.message_from_file(message_in)
616 whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
618 for line in whitelist_in:
619 if re_re_mark.match(line):
620 whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
622 whitelist.append(re.compile(re.escape(line.strip())))
627 fields = ["To", "Bcc", "Cc"]
630 value = message_raw.get(field, None)
633 for item in value.split(","):
634 (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
640 if not mail_whitelisted:
641 print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])
645 # Doesn't have any mail in whitelist so remove the header
647 del message_raw[field]
649 message_raw.replace_header(field, string.join(match, ", "))
651 # Change message fields in order if we don't have a To header
652 if not message_raw.has_key("To"):
655 if message_raw.has_key(field):
656 message_raw[fields[-1]] = message_raw[field]
657 del message_raw[field]
660 # Clean up any temporary files
661 # and return, as we removed all recipients.
663 os.unlink (filename);
666 fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
667 os.write (fd, message_raw.as_string(True));
671 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
673 raise SendmailFailedError, output
675 # Clean up any temporary files
679 ################################################################################
def poolify (source, component):
    """
    Return the pool subdirectory for a source package:
    '<component>/<prefix>/<source>/', where <prefix> is the first letter
    of the source name, or the first four letters ('libX') for library
    packages, per the Debian pool layout.

    @param source: source package name
    @param component: archive component (e.g. 'main'); may be empty
    @rtype: string
    @return: relative pool path, ending in '/'
    """
    # BUG FIX: a non-empty component needs a trailing '/' separator,
    # otherwise component and prefix run together ('mainlibf/...').
    if component:
        component += '/'
    if source[:3] == "lib":
        return component + source[:4] + '/' + source + '/'
    return component + source[:1] + '/' + source + '/'
689 ################################################################################
def move (src, dest, overwrite = 0, perms = 0664):
    # Move src to dest via copy2 + chmod, creating the destination
    # directory (setgid, group-writable) if needed.
    # NOTE(review): several lines are elided in this view -- the if/else
    # assigning dest_dir when dest is itself a directory, the os.umask
    # restore, the 'if not overwrite:' guard nesting the fubar calls, and
    # the final os.unlink(src) that completes the move.
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Moving %s to %s..." % (src, dest)
    # If dest is an existing directory, keep the source's basename.
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
        fubar("Can't move %s to %s - file already exists." % (src, dest))
    if not os.access(dest, os.W_OK):
        fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
def copy (src, dest, overwrite = 0, perms = 0664):
    # Copy src to dest via copy2 + chmod, creating the destination
    # directory (setgid, group-writable) if needed.  Mirrors move() above
    # but raises exceptions instead of calling fubar().
    # NOTE(review): several lines are elided in this view -- the if/else
    # assigning dest_dir when dest is itself a directory, the os.umask
    # restore and the 'if not overwrite:' guard nesting the raises.
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Copying %s to %s..." % (src, dest)
    # If dest is an existing directory, keep the source's basename.
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
        raise FileExistsError
    if not os.access(dest, os.W_OK):
        raise CantOverwriteError
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
736 ################################################################################
739 res = socket.gethostbyaddr(socket.gethostname())
740 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
741 if database_hostname:
742 return database_hostname
def which_conf_file ():
    # Pick the dak configuration file to use for this host.
    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # NOTE(review): os.path.join discards homedir here because the
        # second component is absolute -- confpath is always
        # "/etc/dak.conf"; looks unintended, confirm against upstream.
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            # NOTE(review): reads default_config rather than confpath --
            # confirm whether confpath was meant here.
            apt_pkg.ReadConfigFileISC(Cnf,default_config)

    # We are still in here, so there is no local config file or we do
    # not allow local files. Do the normal stuff.
    if Cnf.get("Config::" + res[0] + "::DakConfig"):
        return Cnf["Config::" + res[0] + "::DakConfig"]
    return default_config
def which_apt_conf_file ():
    # Pick the apt configuration file to use for this host (mirrors
    # which_conf_file above).
    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # NOTE(review): as in which_conf_file, os.path.join discards
        # homedir (absolute second component) and the read below uses
        # default_config, not confpath -- confirm against upstream.
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            apt_pkg.ReadConfigFileISC(Cnf,default_config)

    if Cnf.get("Config::" + res[0] + "::AptConfig"):
        return Cnf["Config::" + res[0] + "::AptConfig"]
    return default_apt_config
776 def which_alias_file():
777 hostname = socket.gethostbyaddr(socket.gethostname())[0]
778 aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
779 if os.path.exists(aliasfn):
784 ################################################################################
786 # Escape characters which have meaning to SQL's regex comparison operator ('~')
787 # (woefully incomplete)
790 s = s.replace('+', '\\\\+')
791 s = s.replace('.', '\\\\.')
794 ################################################################################
def TemplateSubst(map, filename):
    """ Perform a substition of template """
    # Replace each key of 'map' occurring in the template file's text
    # with its value.
    # NOTE(review): the loop header iterating over map's keys (binding
    # 'x'), the file close and the final 'return template' are elided
    # from this view.
    templatefile = open_file(filename)
    template = templatefile.read()
    template = template.replace(x,map[x])
805 ################################################################################
807 def fubar(msg, exit_code=1):
808 sys.stderr.write("E: %s\n" % (msg))
812 sys.stderr.write("W: %s\n" % (msg))
814 ################################################################################
816 # Returns the user name with a laughable attempt at rfc822 conformancy
817 # (read: removing stray periods).
819 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
821 ################################################################################
831 return ("%d%s" % (c, t))
833 ################################################################################
835 def cc_fix_changes (changes):
836 o = changes.get("architecture", "")
838 del changes["architecture"]
839 changes["architecture"] = {}
841 changes["architecture"][j] = 1
843 def changes_compare (a, b):
844 """ Sort by source name, source version, 'have source', and then by filename """
846 a_changes = parse_changes(a)
851 b_changes = parse_changes(b)
855 cc_fix_changes (a_changes)
856 cc_fix_changes (b_changes)
858 # Sort by source name
859 a_source = a_changes.get("source")
860 b_source = b_changes.get("source")
861 q = cmp (a_source, b_source)
865 # Sort by source version
866 a_version = a_changes.get("version", "0")
867 b_version = b_changes.get("version", "0")
868 q = apt_pkg.VersionCompare(a_version, b_version)
872 # Sort by 'have source'
873 a_has_source = a_changes["architecture"].get("source")
874 b_has_source = b_changes["architecture"].get("source")
875 if a_has_source and not b_has_source:
877 elif b_has_source and not a_has_source:
880 # Fall back to sort by filename
883 ################################################################################
def find_next_free (dest, too_many=100):
    # Find a free filename by appending '.1', '.2', ... to dest until a
    # non-existing path is found, giving up after too_many attempts.
    # NOTE(review): the initialisation of 'extra'/'orig_dest', the
    # increment of 'extra' inside the loop and the final 'return dest'
    # are elided from this view.
    while os.path.exists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
    if extra >= too_many:
        raise NoFreeFilenameError
895 ################################################################################
def result_join (original, sep = '\t'):
    """
    Join the elements of the sequence *original* with *sep*, rendering
    None elements (e.g. SQL NULLs) as empty strings.

    @param original: sequence of values, possibly containing None
    @param sep: separator string (default: TAB)
    @rtype: string
    @return: the joined string
    """
    # BUG FIX (vs. the visible fragment): resultlist must be initialised,
    # and non-None values belong in an else-branch -- otherwise every
    # None produced both "" and the None itself.
    resultlist = []
    for value in original:
        if value is None:
            resultlist.append("")
        else:
            resultlist.append(value)
    return sep.join(resultlist)
906 ################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    """
    Prefix every line of *str* with *prefix*.  Lines are stripped of
    surrounding whitespace; blank lines are dropped unless
    include_blank_lines is true.  The result has no trailing newline.

    @param str: input text (name kept for interface compatibility,
        although it shadows the builtin)
    @param prefix: string prepended to every emitted line
    @param include_blank_lines: keep (prefixed) blank lines when true
    @rtype: string
    @return: the prefixed text
    """
    # BUG FIX (vs. the visible fragment): 'out' must be initialised and
    # returned; the trailing newline added by the loop is stripped.
    out = ""
    for line in str.split('\n'):
        line = line.strip()
        if line or include_blank_lines:
            out += "%s%s\n" % (prefix, line)
    # Strip trailing new line
    if out:
        out = out[:-1]
    return out
921 def validate_changes_file_arg(filename, require_changes=1):
923 'filename' is either a .changes or .dak file. If 'filename' is a
924 .dak file, it's changed to be the corresponding .changes file. The
925 function then checks if the .changes file a) exists and b) is
926 readable and returns the .changes filename if so. If there's a
927 problem, the next action depends on the option 'require_changes'
930 - If 'require_changes' == -1, errors are ignored and the .changes
931 filename is returned.
932 - If 'require_changes' == 0, a warning is given and 'None' is returned.
933 - If 'require_changes' == 1, a fatal error is raised.
938 orig_filename = filename
939 if filename.endswith(".dak"):
940 filename = filename[:-4]+".changes"
942 if not filename.endswith(".changes"):
943 error = "invalid file type; not a changes file"
945 if not os.access(filename,os.R_OK):
946 if os.path.exists(filename):
947 error = "permission denied"
949 error = "file not found"
952 if require_changes == 1:
953 fubar("%s: %s." % (orig_filename, error))
954 elif require_changes == 0:
955 warn("Skipping %s - %s" % (orig_filename, error))
957 else: # We only care about the .dak file
962 ################################################################################
965 return (arch != "source" and arch != "all")
967 ################################################################################
def join_with_commas_and(list):
    """Render a sequence as English prose: '', 'a', 'a and b', 'a, b and c'.

    An empty sequence yields the word 'nothing'.  (The parameter name
    shadows the builtin but is kept for interface compatibility.)
    """
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    all_but_last = ", ".join(list[:-1])
    return all_but_last + " and " + list[-1]
974 ################################################################################
979 (pkg, version, constraint) = atom
981 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
984 pp_deps.append(pp_dep)
985 return " |".join(pp_deps)
987 ################################################################################
992 ################################################################################
994 def parse_args(Options):
995 """ Handle -a, -c and -s arguments; returns them as SQL constraints """
999 for suite in split_args(Options["Suite"]):
1000 suite_id = database.get_suite_id(suite)
1002 warn("suite '%s' not recognised." % (suite))
1004 suite_ids_list.append(suite_id)
1006 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
1008 fubar("No valid suite given.")
1013 if Options["Component"]:
1014 component_ids_list = []
1015 for component in split_args(Options["Component"]):
1016 component_id = database.get_component_id(component)
1017 if component_id == -1:
1018 warn("component '%s' not recognised." % (component))
1020 component_ids_list.append(component_id)
1021 if component_ids_list:
1022 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
1024 fubar("No valid component given.")
1028 # Process architecture
1029 con_architectures = ""
1030 if Options["Architecture"]:
1033 for architecture in split_args(Options["Architecture"]):
1034 if architecture == "source":
1037 architecture_id = database.get_architecture_id(architecture)
1038 if architecture_id == -1:
1039 warn("architecture '%s' not recognised." % (architecture))
1041 arch_ids_list.append(architecture_id)
1043 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
1045 if not check_source:
1046 fubar("No valid architecture given.")
1050 return (con_suites, con_architectures, con_components, check_source)
1052 ################################################################################
1054 # Inspired(tm) by Bryn Keller's print_exc_plus (See
1055 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
1058 tb = sys.exc_info()[2]
1065 frame = frame.f_back
1067 traceback.print_exc()
1069 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
1070 frame.f_code.co_filename,
1072 for key, value in frame.f_locals.items():
1073 print "\t%20s = " % key,
1077 print "<unable to print>"
1079 ################################################################################
1081 def try_with_debug(function):
1089 ################################################################################
1091 def arch_compare_sw (a, b):
1093 Function for use in sorting lists of architectures.
1095 Sorts normally except that 'source' dominates all others.
1098 if a == "source" and b == "source":
1107 ################################################################################
1109 def split_args (s, dwim=1):
1111 Split command line arguments which can be separated by either commas
1112 or whitespace. If dwim is set, it will complain about string ending
1113 in comma since this usually means someone did 'dak ls -a i386, m68k
1114 foo' or something and the inevitable confusion resulting from 'm68k'
1115 being treated as an argument is undesirable.
1118 if s.find(",") == -1:
1121 if s[-1:] == "," and dwim:
1122 fubar("split_args: found trailing comma, spurious space maybe?")
1125 ################################################################################
1127 def Dict(**dict): return dict
1129 ########################################
1131 def gpgv_get_status_output(cmd, status_read, status_write):
1133 Our very own version of commands.getouputstatus(), hacked to support
1137 cmd = ['/bin/sh', '-c', cmd]
1138 p2cread, p2cwrite = os.pipe()
1139 c2pread, c2pwrite = os.pipe()
1140 errout, errin = os.pipe()
1150 for i in range(3, 256):
1151 if i != status_write:
1157 os.execvp(cmd[0], cmd)
1163 os.dup2(c2pread, c2pwrite)
1164 os.dup2(errout, errin)
1166 output = status = ""
1168 i, o, e = select.select([c2pwrite, errin, status_read], [], [])
1171 r = os.read(fd, 8196)
1173 more_data.append(fd)
1174 if fd == c2pwrite or fd == errin:
1176 elif fd == status_read:
1179 fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
1181 pid, exit_status = os.waitpid(pid, 0)
1183 os.close(status_write)
1184 os.close(status_read)
1194 return output, status, exit_status
1196 ################################################################################
1198 def process_gpgv_output(status):
1199 # Process the status-fd output
1202 for line in status.split('\n'):
1206 split = line.split()
1208 internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
1210 (gnupg, keyword) = split[:2]
1211 if gnupg != "[GNUPG:]":
1212 internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
1215 if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
1216 internal_error += "found duplicate status token ('%s').\n" % (keyword)
1219 keywords[keyword] = args
1221 return (keywords, internal_error)
1223 ################################################################################
1225 def retrieve_key (filename, keyserver=None, keyring=None):
1227 Retrieve the key that signed 'filename' from 'keyserver' and
1228 add it to 'keyring'. Returns nothing on success, or an error message
1232 # Defaults for keyserver and keyring
1234 keyserver = Cnf["Dinstall::KeyServer"]
1236 keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
1238 # Ensure the filename contains no shell meta-characters or other badness
1239 if not re_taint_free.match(filename):
1240 return "%s: tainted filename" % (filename)
1242 # Invoke gpgv on the file
1243 status_read, status_write = os.pipe()
1244 cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1245 (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1247 # Process the status-fd output
1248 (keywords, internal_error) = process_gpgv_output(status)
1250 return internal_error
1252 if not keywords.has_key("NO_PUBKEY"):
1253 return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1255 fingerprint = keywords["NO_PUBKEY"][0]
1256 # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
1257 # it'll try to create a lockfile in /dev. A better solution might
1258 # be a tempfile or something.
1259 cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1260 % (Cnf["Dinstall::SigningKeyring"])
1261 cmd += " --keyring %s --keyserver %s --recv-key %s" \
1262 % (keyring, keyserver, fingerprint)
1263 (result, output) = commands.getstatusoutput(cmd)
1265 return "'%s' failed with exit code %s" % (cmd, result)
1269 ################################################################################
def gpg_keyring_args(keyrings=None):
    # Build the '--keyring <path>' argument string for gpg/gpgv calls.
    # NOTE(review): the guard that only falls back to the configured
    # keyrings when the argument is unset is elided from this view -- as
    # shown, the assignment would unconditionally override the parameter.
    keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
    return " ".join(["--keyring %s" % x for x in keyrings])
1277 ################################################################################
def check_signature (sig_filename, reject, data_filename="", keyrings=None, autofetch=None):
    """
    Check the signature of a file and return the fingerprint if the
    signature is valid or 'None' if it's not.  The first argument is the
    filename whose signature should be checked.  The second argument is a
    reject function and is called when an error is found.  The reject()
    function must allow for two arguments: the first is the error message,
    the second is an optional prefix string.  It's possible for reject()
    to be called more than once during an invocation of check_signature().
    The third argument is optional and is the name of the files the
    detached signature applies to.  The fourth argument is optional and is
    a *list* of keyrings to use.  'autofetch' can either be None, True or
    False.  If None, the default behaviour specified in the config will be
    used.
    """

    # Ensure the filename contains no shell meta-characters or other badness,
    # since both names are interpolated into a shell command below.
    if not re_taint_free.match(sig_filename):
        reject("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
        return None

    if data_filename and not re_taint_free.match(data_filename):
        reject("!!WARNING!! tainted data filename: '%s'." % (data_filename))
        return None

    if not keyrings:
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    # Autofetch the signing key if that's enabled
    if autofetch is None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    if autofetch:
        error_msg = retrieve_key(sig_filename)
        if error_msg:
            reject(error_msg)
            return None

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    if internal_error:
        reject("internal error while performing signature check on %s." % (sig_filename))
        reject(internal_error, "")
        reject("Please report the above errors to the Archive maintainers by replying to this mail.", "")
        return None

    bad = ""
    # Now check for obviously bad things in the processed output
    if "KEYREVOKED" in keywords:
        reject("The key used to sign %s has been revoked." % (sig_filename))
        bad = 1
    if "BADSIG" in keywords:
        reject("bad signature on %s." % (sig_filename))
        bad = 1
    if "ERRSIG" in keywords and "NO_PUBKEY" not in keywords:
        reject("failed to check signature on %s." % (sig_filename))
        bad = 1
    if "NO_PUBKEY" in keywords:
        args = keywords["NO_PUBKEY"]
        key = ""
        if len(args) >= 1:
            key = args[0]
        reject("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
        bad = 1
    if "BADARMOR" in keywords:
        reject("ASCII armour of signature was corrupt in %s." % (sig_filename))
        bad = 1
    if "NODATA" in keywords:
        reject("no signature found in %s." % (sig_filename))
        bad = 1
    if "EXPKEYSIG" in keywords:
        args = keywords["EXPKEYSIG"]
        key = ""
        if len(args) >= 1:
            key = args[0]
        reject("Signature made by expired key 0x%s" % (key))
        bad = 1
    if "KEYEXPIRED" in keywords and "GOODSIG" not in keywords:
        args = keywords["KEYEXPIRED"]
        expiredate = ""
        if len(args) >= 1:
            timestamp = args[0]
            # gpgv emits either a seconds-since-epoch value or an ISO 8601
            # string (containing a 'T'); render the epoch form readably.
            if timestamp.count("T") == 0:
                try:
                    expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
                except ValueError:
                    expiredate = "unknown (%s)" % (timestamp)
            else:
                expiredate = timestamp
        reject("The key used to sign %s has expired on %s" % (sig_filename, expiredate))
        bad = 1

    if bad:
        return None

    # Next check gpgv exited with a zero return code
    if exit_status:
        reject("gpgv failed while checking %s." % (sig_filename))
        if status.strip():
            reject(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
        else:
            reject(prefix_multi_line_string(output, " [GPG output:] "), "")
        return None

    # Sanity check the good stuff we expect
    if "VALIDSIG" not in keywords:
        reject("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
        bad = 1
    else:
        args = keywords["VALIDSIG"]
        if len(args) < 1:
            reject("internal error while checking signature on %s." % (sig_filename))
            bad = 1
        else:
            fingerprint = args[0]
    if "GOODSIG" not in keywords:
        reject("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
        bad = 1
    if "SIG_ID" not in keywords:
        reject("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))
        bad = 1

    # Finally ensure there's not something we don't recognise
    known_keywords = set(["VALIDSIG", "SIG_ID", "GOODSIG", "BADSIG", "ERRSIG",
                          "SIGEXPIRED", "KEYREVOKED", "NO_PUBKEY", "BADARMOR",
                          "NODATA", "NOTATION_DATA", "NOTATION_NAME", "KEYEXPIRED"])

    for keyword in keywords.keys():
        if keyword not in known_keywords:
            reject("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
            bad = 1

    if bad:
        return None
    return fingerprint
1423 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """retrieve email addresses from gpg key uids for a given fingerprint"""
    # Serve repeated lookups from the module-level cache.
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses is not None:
        return addresses
    # Cache miss: ask gpg for the key and scrape the uid lines.
    addresses = set()
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
          % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    # Only trust the output when gpg succeeded; otherwise cache an empty set.
    if result == 0:
        for l in output.split('\n'):
            m = re_gpg_uid.match(l)
            if m:
                addresses.add(m.group(1))
    key_uid_email_cache[fingerprint] = addresses
    return addresses
1442 ################################################################################
1444 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """
    Greedily word-wrap 'paragraph' to at most 'max_length' characters per
    line, prefixing every continuation line with 'prefix'.  Words longer
    than 'max_length' are placed on a line of their own rather than broken.

    @param paragraph: text to wrap (whitespace is collapsed, as with split())
    @param max_length: maximum line width
    @param prefix: string prepended to every line after the first
    @return: the wrapped string (no trailing newline)
    """
    # Local import so the module's top-level import block is unchanged.
    import textwrap
    # textwrap.fill implements exactly the greedy fill the hand-rolled
    # loop attempted, without the line/have_started bookkeeping.
    return textwrap.fill(paragraph, width=max_length,
                         subsequent_indent=prefix,
                         break_long_words=False,
                         break_on_hyphens=False)
1475 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
    Returns the fixed 'src'.
    """
    # Strip the archive root off both endpoints (first occurrence only).
    stripped_src = src.replace(root, '', 1)
    stripped_dest = dest.replace(root, '', 1)
    # One "../" hop for every path component of the link's directory.
    link_dir = os.path.dirname(stripped_dest)
    hops = '../' * len(link_dir.split('/'))
    return hops + stripped_src
1488 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.

    @param directory: if non-null, the directory the file is pre-created in
    @param prefix: prefix for the filename, defaults to "dak"
    @param suffix: if non-null, the filename will end with it

    @return: a pair (fd, name) as produced by tempfile.mkstemp
    """
    (fd, name) = tempfile.mkstemp(suffix, prefix, directory)
    return (fd, name)
1502 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Return a secure and unique directory by pre-creating it.

    @param parent: if non-null, the directory the new directory is pre-created in
    @param prefix: prefix for the directory name, defaults to "dak"
    @param suffix: if non-null, the directory name will end with it

    @return: pathname of the newly created directory
    """
    path = tempfile.mkdtemp(suffix, prefix, parent)
    return path
1516 ################################################################################
def is_email_alias(email):
    """ checks if the user part of the email is listed in the alias file """
    # The cache is module-level so the alias file is read at most once.
    global alias_cache
    if alias_cache is None:
        aliasfn = which_alias_file()
        alias_cache = set()
        # which_alias_file() may return a falsy value when no alias file
        # exists; in that case the cache stays empty.
        if aliasfn:
            for l in open(aliasfn):
                alias_cache.add(l.split(':')[0])
    uid = email.split('@')[0]
    return uid in alias_cache
1530 ################################################################################
def get_changes_files(dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place)

    Returns a list of filenames
    """
    try:
        # Much of the rest of p-u/p-a depends on being in the right place
        os.chdir(dir)
        changes_files = [x for x in os.listdir(dir) if x.endswith('.changes')]
    except OSError as e:
        # fubar reports the error and exits; unreadable/missing dir is fatal
        fubar("Failed to read list from directory %s (%s)" % (dir, e))

    return changes_files
1549 ################################################################################
# Module bootstrap: create the shared configuration object consulted by the
# helpers above and load the default dak configuration into it.
Cnf = apt_pkg.newConfiguration()
apt_pkg.ReadConfigFileISC(Cnf,default_config)
# If a different config file is in effect (which_conf_file() is defined
# elsewhere in this file), layer it on top of the defaults.
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1559 ###############################################################################