2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
42 import email as modemail
43 from dak_exceptions import *
44 from textutils import fix_maintainer
45 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
46 re_multi_line_field, re_srchasver, re_verwithext, \
47 re_parse_maintainer, re_taint_free, re_gpg_uid, re_re_mark, \
################################################################################

# Host-wide configuration file locations.
default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used

alias_cache = None #: Cache for email alias checks
key_uid_email_cache = {} #: Cache for email addresses from gpg key uids

# (hashname, function, earliest_changes_version)
# Each tuple names a checksum field, the apt_pkg function that computes it,
# and the first .changes format version that may carry it.
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
                ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc

################################################################################
    # NOTE(review): the enclosing "def html_escape(s):" line is not visible in
    # this view; this is the body of the HTML-escaping helper.  It substitutes
    # every character matched by re_html_escaping with its entity from the
    # html_escaping map.
    """ Escape html chars """
    return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)

################################################################################
def open_file(filename, mode='r'):
    """
    Open C{file}, return fileobject.

    @type filename: string
    @param filename: path/filename to open

    @param mode: open mode

    @return: open fileobject

    @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
    """
    # The builtin open() is wrapped so that callers see a dak-specific
    # exception instead of a bare IOError.
    f = open(filename, mode)
    # Reached from the except IOError branch:
    raise CantOpenError, filename

################################################################################
def our_raw_input(prompt=""):
    # raw_input() replacement that writes the prompt to stdout itself and
    # treats EOF (^D) as a user interrupt rather than an exception.
    sys.stdout.write(prompt)
    # Reached on EOFError from the read:
    sys.stderr.write("\nUser interrupt (^D).\n")

################################################################################
def extract_component_from_section(section):
    # Split "component/section" style section strings ("contrib/net" ->
    # component "contrib"); plain sections default to main below.
    if section.find('/') != -1:
        component = section.split('/')[0]

    # Expand default component
    # A Component::<section> mapping in the config can override the default.
    if Cnf.has_key("Component::%s" % section):

    return (section, component)

################################################################################
def parse_deb822(contents, signing_rules=0):
    # Parse an RFC822-ish (deb822) blob into a field->value dict ("changes").
    # signing_rules: -1 = no signature required, 0 = signature required,
    # 1 = strict dpkg-source-style format checking (per parse_changes below).

    # Split the lines in the input, keeping the linebreaks.
    lines = contents.splitlines(True)
        raise ParseChangesError, "[Empty changes file]"

    # Reindex by line number so we can easily verify the format of
        indexed_lines[index] = line[:-1]

    num_of_lines = len(indexed_lines.keys())
    while index < num_of_lines:
        line = indexed_lines[index]
        if signing_rules == 1:
                # In strict mode the signed data must be followed directly
                # by the PGP SIGNATURE armour.
                if index > num_of_lines:
                    raise InvalidDscError, index
                line = indexed_lines[index]
                if not line.startswith("-----BEGIN PGP SIGNATURE"):
                    raise InvalidDscError, index
        if line.startswith("-----BEGIN PGP SIGNATURE"):
        if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
            if signing_rules == 1:
                # Skip the PGP header data up to the first blank line.
                while index < num_of_lines and line != "":
                    line = indexed_lines[index]
        # If we're not inside the signed data, don't process anything
        if signing_rules >= 0 and not inside_signature:
        # "Field: value" on one line.
        slf = re_single_line_field.match(line)
            field = slf.groups()[0].lower()
            changes[field] = slf.groups()[1]
            changes[field] += '\n'
        # Continuation line of a multi-line field.
        mlf = re_multi_line_field.match(line)
                raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
            if first == 1 and changes[field] != "":
                changes[field] += '\n'
            changes[field] += mlf.groups()[0] + '\n'

    # Strict mode: we must have seen the closing signature armour.
    if signing_rules == 1 and inside_signature:
        raise InvalidDscError, index

    # Keep the raw input around for later re-parsing (e.g. ensure_hashes).
    changes["filecontents"] = "".join(lines)

    if changes.has_key("source"):
        # Strip the source version in brackets from the source field,
        # put it in the "source-version" field instead.
        srcver = re_srchasver.search(changes["source"])
            changes["source"] = srcver.group(1)
            changes["source-version"] = srcver.group(2)

        raise ParseChangesError, error

################################################################################
def parse_changes(filename, signing_rules=0):
    """
    Parses a changes file and returns a dictionary where each field is a
    key. The mandatory first argument is the filename of the .changes
    file.

    signing_rules is an optional argument:

    - If signing_rules == -1, no signature is required.
    - If signing_rules == 0 (the default), a signature is required.
    - If signing_rules == 1, it turns on the same strict format checking

    The rules for (signing_rules == 1)-mode are:

    - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
      followed by any PGP header data and must end with a blank line.

    - The data section must end with a blank line and must be followed by
      "-----BEGIN PGP SIGNATURE-----".
    """
    changes_in = open_file(filename)
    content = changes_in.read()
    # Validate the file is proper UTF-8 before parsing; the decoded value
    # is discarded, we only care whether it raises.
    unicode(content, 'utf-8')
    # Reached from the except UnicodeError branch:
    raise ChangesUnicodeError, "Changes file not proper utf-8"
    return parse_deb822(content, signing_rules)

################################################################################
def hash_key(hashname):
    """Return the files-dict key a checksum is stored under (e.g. "md5" -> "md5sum")."""
    return '{0}sum'.format(hashname)
250 ################################################################################
def create_hash(where, files, hashname, hashfunc):
    """
    create_hash extends the passed files dict with the given hash by
    iterating over all files on disk and passing them to the hashing
    function given.
    """
    # Collects reject messages; files that cannot be opened are reported
    # rather than raising.
    for f in files.keys():
            file_handle = open_file(f)
        except CantOpenError:
            rejmsg.append("Could not open file %s for checksumming" % (f))

        # Store the computed digest under e.g. "sha1sum".
        files[f][hash_key(hashname)] = hashfunc(file_handle)

################################################################################
def check_hash(where, files, hashname, hashfunc):
    """
    check_hash checks the given hash in the files dict against the actual
    files on disk. The hash values need to be present consistently in
    all file entries. It does not modify its input in any way.
    """
    for f in files.keys():
            file_handle = open_file(f)

            # Check for the hash entry, to not trigger a KeyError.
            if not files[f].has_key(hash_key(hashname)):
                rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,

            # Actually check the hash for correctness.
            if hashfunc(file_handle) != files[f][hash_key(hashname)]:
                rejmsg.append("%s: %s check failed in %s" % (f, hashname,
        except CantOpenError:
            # TODO: This happens when the file is in the pool.
            # warn("Cannot open file %s" % f)

################################################################################
def check_size(where, files):
    """
    check_size checks the file sizes in the passed files dict against the
    files on disk.
    """
    for f in files.keys():
            # TODO: This happens when the file is in the pool.

        # Compare the on-disk size (from os.stat) against the manifest size.
        actual_size = entry[stat.ST_SIZE]
        size = int(files[f]["size"])
        if size != actual_size:
            rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
                          % (f, actual_size, size, where))

################################################################################
def check_hash_fields(what, manifest):
    """
    check_hash_fields ensures that there are no checksum fields in the
    given dict that we do not know about.
    """
    # Anything named "checksums-<algo>" must use an algorithm from
    # known_hashes; otherwise a reject message is recorded.
    hashes = map(lambda x: x[0], known_hashes)
    for field in manifest:
        if field.startswith("checksums-"):
            hashname = field.split("-",1)[1]
            if hashname not in hashes:
                rejmsg.append("Unsupported checksum field for %s "\
                    "in %s" % (hashname, what))

################################################################################
def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
    # If the .changes format is new enough to carry this hash, import the
    # values from the changes; otherwise compute them from the files on disk.
    if format >= version:
        # The version should contain the specified hash.

        # Import hashes from the changes
        rejmsg = parse_checksums(".changes", files, changes, hashname)

        # We need to calculate the hash because it can't possibly

    # func is either check_hash or create_hash depending on the branch above.
    return func(".changes", files, hashname, hashfunc)

# We could add the orig which might be in the pool to the files dict to
# access the checksums easily.
def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
    """
    ensure_dsc_hashes' task is to ensure that each and every *present* hash
    in the dsc is correct, i.e. identical to the changes file and if necessary
    the pool. The latter task is delegated to check_hash.
    """
    # Skip silently when the .dsc does not carry this checksum field at all.
    if not dsc.has_key('Checksums-%s' % (hashname,)):

    # Import hashes from the dsc
    parse_checksums(".dsc", dsc_files, dsc, hashname)

    rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))

################################################################################
def ensure_hashes(changes, dsc, files, dsc_files):
    # Validate every known hash in the .changes (and, for source uploads,
    # the .dsc), collecting reject messages.

    # Make sure we recognise the format of the Files: field in the .changes
    format = changes.get("format", "0.0").split(".", 1)
        format = int(format[0]), int(format[1])
        # Fallback when the minor part is missing or unparsable.
        format = int(float(format[0])), 0

    # We need to deal with the original changes blob, as the fields we need
    # might not be in the changes dict serialised into the .dak anymore.
    orig_changes = parse_deb822(changes['filecontents'])

    # Copy the checksums over to the current changes dict. This will keep
    # the existing modifications to it intact.
    for field in orig_changes:
        if field.startswith('checksums-'):
            changes[field] = orig_changes[field]

    # Check for unsupported hashes
    rejmsg.extend(check_hash_fields(".changes", changes))
    rejmsg.extend(check_hash_fields(".dsc", dsc))

    # We have to calculate the hash if we have an earlier changes version than
    # the hash appears in rather than require it exist in the changes file
    for hashname, hashfunc, version in known_hashes:
        rejmsg.extend(_ensure_changes_hash(changes, format, version, files,
        # Only source uploads carry a .dsc to verify.
        if "source" in changes["architecture"]:
            rejmsg.extend(_ensure_dsc_hash(dsc, dsc_files, hashname,
def parse_checksums(where, files, manifest, hashname):
    # Import the checksums-<hashname> field of a manifest (.changes/.dsc)
    # into the files dict, cross-checking sizes on the way.
    field = 'checksums-%s' % hashname
    if not field in manifest:
    for line in manifest[field].split('\n'):
        checksum, size, checkfile = line.strip().split(' ')
        if not files.has_key(checkfile):
            # TODO: check for the file's entry in the original files dict, not
            # the one modified by (auto)byhand and other weird stuff
            # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
            #        (file, hashname, where))
        # NOTE(review): `size` here is a string from split(); this compares
        # correctly only if files[...]["size"] is also stored as a string —
        # verify against the callers.
        if not files[checkfile]["size"] == size:
            rejmsg.append("%s: size differs for files and checksums-%s entry "\
                "in %s" % (checkfile, hashname, where))
        files[checkfile][hash_key(hashname)] = checksum
    for f in files.keys():
        if not files[f].has_key(hash_key(hashname)):
            # NOTE(review): this message interpolates `checkfile` (the last
            # line of the previous loop) but reports on `f` — looks like it
            # should use `f` instead; confirm and fix.
            rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,

################################################################################
# Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl

def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
    # Parse the Files:/Checksums-*: field of a .changes or .dsc into a dict
    # keyed by filename.

    # Make sure we have a Files: field to parse...
    if not changes.has_key(field):
        raise NoFilesFieldError

    # Make sure we recognise the format of the Files: field
    format = re_verwithext.search(changes.get("format", "0.0"))
        raise UnknownFormatError, "%s" % (changes.get("format","0.0"))

    # Normalise to a (major, minor, extension) tuple.
    format = format.groups()
    if format[1] == None:
        format = int(float(format[0])), 0, format[2]
        format = int(format[0]), int(format[1]), format[2]
    if format[2] == None:

        # format = (1,0) are the only formats we currently accept,
        # format = (0,0) are missing format headers of which we still
        # have some in the archive.
        if format != (1,0) and format != (0,0):
            raise UnknownFormatError, "%s" % (changes.get("format","0.0"))

        if (format < (1,5) or format > (1,8)):
            raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
        if field != "files" and format < (1,8):
            raise UnknownFormatError, "%s" % (changes.get("format","0.0"))

    # Only a .changes Files: entry carries section/priority columns.
    includes_section = (not is_a_dsc) and field == "files"

    # Parse each entry/line:
    for i in changes[field].split('\n'):
        section = priority = ""
            # 5-column form: checksum size section priority name
            (md5, size, section, priority, name) = s
            # 3-column form: checksum size name
            (md5, size, name) = s
            raise ParseChangesError, i

        (section, component) = extract_component_from_section(section)

        files[name] = Dict(size=size, section=section,
                           priority=priority, component=component)
        files[name][hashname] = md5

################################################################################
def send_mail (message, filename=""):
    """sendmail wrapper, takes _either_ a message string or a file as arguments"""

    # If we've been passed a string dump it into a temporary file
        (fd, filename) = tempfile.mkstemp()
        os.write (fd, message)

    # Optionally filter the recipients against a whitelist of addresses
    # and regexes from Dinstall::MailWhiteList.
    if Cnf.has_key("Dinstall::MailWhiteList") and \
           Cnf["Dinstall::MailWhiteList"] != "":
        message_in = open_file(filename)
        message_raw = modemail.message_from_file(message_in)

        whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
            for line in whitelist_in:
                if not re_whitespace_comment.match(line):
                    if re_re_mark.match(line):
                        # Lines marked as regexes are compiled as-is...
                        whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
                        # ...everything else matches literally.
                        whitelist.append(re.compile(re.escape(line.strip())))

        # Walk over the recipient headers and drop non-whitelisted addresses.
        fields = ["To", "Bcc", "Cc"]
            value = message_raw.get(field, None)
                for item in value.split(","):
                    (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
                    if not mail_whitelisted:
                        print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])

                # Doesn't have any mail in whitelist so remove the header
                    del message_raw[field]
                    message_raw.replace_header(field, string.join(match, ", "))

        # Change message fields in order if we don't have a To header
        if not message_raw.has_key("To"):
                if message_raw.has_key(field):
                    message_raw[fields[-1]] = message_raw[field]
                    del message_raw[field]
                # Clean up any temporary files
                # and return, as we removed all recipients.
                    os.unlink (filename);

        # NOTE(review): O_EXCL without O_CREAT has unspecified behaviour for
        # an existing file — confirm the intent (likely rewriting the temp
        # file in place with the filtered message).
        fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
        os.write (fd, message_raw.as_string(True));

    # Invoke sendmail
    (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
        raise SendmailFailedError, output

    # Clean up any temporary files

################################################################################
def poolify (source, component):
    # Map a source package name to its pool sub-directory: "lib" packages
    # hash on the first four characters ("liba/"), everything else on the
    # first one ("a/").
    if source[:3] == "lib":
        return component + source[:4] + '/' + source + '/'
    return component + source[:1] + '/' + source + '/'

################################################################################
def move (src, dest, overwrite = 0, perms = 0664):
    # Move src to dest (copy + chmod); creates missing destination
    # directories (mode 02775, temporarily clearing the umask) and refuses
    # to clobber existing files unless overwrite is set.
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Moving %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
            fubar("Can't move %s to %s - file already exists." % (src, dest))
            if not os.access(dest, os.W_OK):
                fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
    # copy2 preserves mtime; permissions are then forced to `perms`.
    shutil.copy2(src, dest)
    os.chmod(dest, perms)

def copy (src, dest, overwrite = 0, perms = 0664):
    # Same as move() above but leaves src in place, and raises exceptions
    # (FileExistsError / CantOverwriteError) instead of calling fubar().
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Copying %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
            raise FileExistsError
            if not os.access(dest, os.W_OK):
                raise CantOverwriteError
    shutil.copy2(src, dest)
    os.chmod(dest, perms)

################################################################################
################################################################################

    # NOTE(review): the enclosing def line (presumably "def where_am_i ():")
    # is not visible in this view.  Resolves the local host name and lets a
    # per-host Config::<host>::DatabaseHostname override it.
    res = socket.gethostbyaddr(socket.gethostname())
    database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
    if database_hostname:
        return database_hostname
def which_conf_file ():
    # Pick the dak config file for this host: an allowed per-user local
    # config, a per-host Config::<host>::DakConfig, or the default.
    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # NOTE(review): os.path.join discards homedir because the second
        # component is absolute — confpath is always "/etc/dak.conf".
        # Probably "etc/dak.conf" (relative) was intended; confirm.
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            # NOTE(review): reads default_config, not confpath — looks like
            # the local config file found above is never actually loaded.
            apt_pkg.ReadConfigFileISC(Cnf,default_config)

    # We are still in here, so there is no local config file or we do
    # not allow local files. Do the normal stuff.
    if Cnf.get("Config::" + res[0] + "::DakConfig"):
        return Cnf["Config::" + res[0] + "::DakConfig"]

    return default_config
def which_apt_conf_file ():
    # Same host-specific lookup as which_conf_file(), but for the apt
    # configuration (Config::<host>::AptConfig, falling back to the default).
    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # NOTE(review): same two issues as in which_conf_file() — the
        # absolute second argument makes the join ignore homedir, and
        # default_config (not confpath) is read.  Confirm and fix both.
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            apt_pkg.ReadConfigFileISC(Cnf,default_config)

    if Cnf.get("Config::" + res[0] + "::AptConfig"):
        return Cnf["Config::" + res[0] + "::AptConfig"]

    return default_apt_config
def which_alias_file():
    # Return the host-specific forward-alias file if it exists
    # (return value for the missing-file case is not visible in this view).
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
    if os.path.exists(aliasfn):

################################################################################
# Escape characters which have meaning to SQL's regex comparison operator ('~')
# (woefully incomplete)
    # NOTE(review): the enclosing def line is not visible in this view; this
    # is the body of the SQL-regex escaping helper.  Doubled backslashes are
    # needed because the value passes through SQL string quoting too.
    s = s.replace('+', '\\\\+')
    s = s.replace('.', '\\\\.')

################################################################################
def TemplateSubst(map, filename):
    """ Perform a substition of template """
    # Reads the template file and replaces every key of `map` occurring in
    # the text with its value.
    templatefile = open_file(filename)
    template = templatefile.read()
        template = template.replace(x,map[x])

################################################################################
def fubar(msg, exit_code=1):
    # Print a fatal error to stderr (and, per the signature, exit with
    # exit_code — the exit call itself is not visible in this view).
    sys.stderr.write("E: %s\n" % (msg))

    # Body of the companion warn() helper (its def line is not visible here):
    sys.stderr.write("W: %s\n" % (msg))

################################################################################
# Returns the user name with a laughable attempt at rfc822 conformancy
# (read: removing stray periods).
    # Body of whoami() (def line not visible): GECOS full name of the
    # invoking user, with '.' characters stripped.
    return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')

################################################################################

    # Tail of a human-readable size formatter (def line not visible):
    # c is the scaled count, t the unit suffix.
    return ("%d%s" % (c, t))

################################################################################
def cc_fix_changes (changes):
    # Convert the space-separated "architecture" string into a dict keyed
    # by architecture name (value 1), in place.
    o = changes.get("architecture", "")
        del changes["architecture"]
    changes["architecture"] = {}
        changes["architecture"][j] = 1

def changes_compare (a, b):
    """ Sort by source name, source version, 'have source', and then by filename """
    # a and b are .changes filenames; unparsable files fall back to an
    # empty dict (handled in the lines not visible in this view).
        a_changes = parse_changes(a)

        b_changes = parse_changes(b)

    # Normalise the architecture fields for the "have source" comparison.
    cc_fix_changes (a_changes)
    cc_fix_changes (b_changes)

    # Sort by source name
    a_source = a_changes.get("source")
    b_source = b_changes.get("source")
    q = cmp (a_source, b_source)

    # Sort by source version
    a_version = a_changes.get("version", "0")
    b_version = b_changes.get("version", "0")
    q = apt_pkg.VersionCompare(a_version, b_version)

    # Sort by 'have source'
    a_has_source = a_changes["architecture"].get("source")
    b_has_source = b_changes["architecture"].get("source")
    if a_has_source and not b_has_source:
    elif b_has_source and not a_has_source:

    # Fall back to sort by filename

################################################################################
def find_next_free (dest, too_many=100):
    # Return dest, or dest + '.<n>' for the first n that does not exist yet;
    # give up (NoFreeFilenameError) after too_many attempts.
    while os.path.exists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
    if extra >= too_many:
        raise NoFreeFilenameError

################################################################################
def result_join (original, sep = '\t'):
    # Join a sequence with sep, rendering None entries as empty strings.
    for i in xrange(len(original)):
        if original[i] == None:
            resultlist.append("")
            resultlist.append(original[i])
    return sep.join(resultlist)

################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    # Prepend `prefix` to every line of `str`; blank lines are dropped
    # unless include_blank_lines is set.
    for line in str.split('\n'):
        if line or include_blank_lines:
            out += "%s%s\n" % (prefix, line)
    # Strip trailing new line

################################################################################
def validate_changes_file_arg(filename, require_changes=1):
    """
    'filename' is either a .changes or .dak file. If 'filename' is a
    .dak file, it's changed to be the corresponding .changes file. The
    function then checks if the .changes file a) exists and b) is
    readable and returns the .changes filename if so. If there's a
    problem, the next action depends on the option 'require_changes'

    - If 'require_changes' == -1, errors are ignored and the .changes
      filename is returned.
    - If 'require_changes' == 0, a warning is given and 'None' is returned.
    - If 'require_changes' == 1, a fatal error is raised.
    """
    orig_filename = filename
    if filename.endswith(".dak"):
        filename = filename[:-4]+".changes"

    if not filename.endswith(".changes"):
        error = "invalid file type; not a changes file"
        if not os.access(filename,os.R_OK):
            if os.path.exists(filename):
                error = "permission denied"
                error = "file not found"

        if require_changes == 1:
            fubar("%s: %s." % (orig_filename, error))
        elif require_changes == 0:
            warn("Skipping %s - %s" % (orig_filename, error))
    else: # We only care about the .dak file

################################################################################
    # Body of real_arch-style predicate (def line not visible): true for a
    # concrete binary architecture, false for "source" and "all".
    return (arch != "source" and arch != "all")

################################################################################
def join_with_commas_and(list):
    """Join strings into an English enumeration ("a, b and c").

    Returns "nothing" for an empty list and the sole element for a
    single-item list.
    """
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    head, last = list[:-1], list[-1]
    return "%s and %s" % (", ".join(head), last)
################################################################################

    # Body of a dependency pretty-printer (def line not visible): each atom
    # is a (package, version, constraint) triple; alternatives are joined
    # with " |" as in Debian dependency syntax.
        (pkg, version, constraint) = atom
            pp_dep = "%s (%s %s)" % (pkg, constraint, version)
        pp_deps.append(pp_dep)
    return " |".join(pp_deps)

################################################################################
909 ################################################################################
def parse_args(Options):
    """ Handle -a, -c and -s arguments; returns them as SQL constraints """
    # Each recognised option value is resolved to database ids and turned
    # into an "AND <col> IN (...)" SQL fragment; unknown names only warn,
    # but an option whose every value is unknown is fatal.

    # Process suite
        for suite in split_args(Options["Suite"]):
            suite_id = database.get_suite_id(suite)
                warn("suite '%s' not recognised." % (suite))
                suite_ids_list.append(suite_id)
            con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
            fubar("No valid suite given.")

    # Process component
    if Options["Component"]:
        component_ids_list = []
        for component in split_args(Options["Component"]):
            component_id = database.get_component_id(component)
            if component_id == -1:
                warn("component '%s' not recognised." % (component))
                component_ids_list.append(component_id)
        if component_ids_list:
            con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
            fubar("No valid component given.")

    # Process architecture
    con_architectures = ""
    if Options["Architecture"]:
        for architecture in split_args(Options["Architecture"]):
            # "source" is special-cased (sets check_source) rather than
            # contributing an architecture id.
            if architecture == "source":
                architecture_id = database.get_architecture_id(architecture)
                if architecture_id == -1:
                    warn("architecture '%s' not recognised." % (architecture))
                    arch_ids_list.append(architecture_id)
            con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
            fubar("No valid architecture given.")

    return (con_suites, con_architectures, con_components, check_source)

################################################################################
# Inspired(tm) by Bryn Keller's print_exc_plus (See
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
    # Body of the extended-traceback printer (def line not visible): after
    # the normal traceback, dump every frame's local variables.
    tb = sys.exc_info()[2]
    traceback.print_exc()
        print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
                                               frame.f_code.co_filename,
        for key, value in frame.f_locals.items():
            print "\t%20s = " % key,
                # repr() of some locals can itself raise; shown as:
                print "<unable to print>"

################################################################################
# Runs `function`, presumably with extended traceback printing on failure
# (body not visible in this view).
def try_with_debug(function):

################################################################################
def arch_compare_sw (a, b):
    """
    Function for use in sorting lists of architectures.

    Sorts normally except that 'source' dominates all others.
    """
    if a == "source" and b == "source":

################################################################################
def split_args (s, dwim=1):
    """
    Split command line arguments which can be separated by either commas
    or whitespace. If dwim is set, it will complain about string ending
    in comma since this usually means someone did 'dak ls -a i386, m68k
    foo' or something and the inevitable confusion resulting from 'm68k'
    being treated as an argument is undesirable.
    """
    # No comma -> whitespace-separated (return not visible in this view).
    if s.find(",") == -1:
    if s[-1:] == "," and dwim:
        fubar("split_args: found trailing comma, spurious space maybe?")

################################################################################
def Dict(**kwargs):
    """Convenience shorthand: build a dictionary from keyword arguments."""
    return kwargs
1046 ########################################
def gpgv_get_status_output(cmd, status_read, status_write):
    """
    Our very own version of commands.getouputstatus(), hacked to support
    gpgv's status fd.
    """
    # Forks a shell running `cmd` with pipes for stdin/stdout/stderr plus
    # the caller-supplied status fd pair kept open across the exec.
    cmd = ['/bin/sh', '-c', cmd]
    p2cread, p2cwrite = os.pipe()
    c2pread, c2pwrite = os.pipe()
    errout, errin = os.pipe()

        # Child: close every fd except the status fd before exec'ing gpgv.
        for i in range(3, 256):
            if i != status_write:
        os.execvp(cmd[0], cmd)

    # Parent side.
    # NOTE(review): the fd pairs read oddly here — the parent dup2's and
    # selects on the *write*-named ends (c2pwrite, errin); confirm the
    # naming against the full function before touching this.
    os.dup2(c2pread, c2pwrite)
    os.dup2(errout, errin)

    output = status = ""
        i, o, e = select.select([c2pwrite, errin, status_read], [], [])
            r = os.read(fd, 8196)
                more_data.append(fd)
                if fd == c2pwrite or fd == errin:
                elif fd == status_read:
                    fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))

    pid, exit_status = os.waitpid(pid, 0)

    os.close(status_write)
    os.close(status_read)

    return output, status, exit_status

################################################################################
def process_gpgv_output(status):
    # Process the status-fd output
    # Parses gpgv --status-fd lines ("[GNUPG:] KEYWORD args...") into a
    # keyword -> args dict; malformed lines accumulate in internal_error.
    for line in status.split('\n'):
        split = line.split()
            internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
        # A few keywords may legitimately appear more than once.
        if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
            keywords[keyword] = args

    return (keywords, internal_error)

################################################################################
def retrieve_key (filename, keyserver=None, keyring=None):
    """
    Retrieve the key that signed 'filename' from 'keyserver' and
    add it to 'keyring'. Returns nothing on success, or an error message
    otherwise.
    """
    # Defaults for keyserver and keyring
        keyserver = Cnf["Dinstall::KeyServer"]
        keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]

    # Ensure the filename contains no shell meta-characters or other badness
    # (the filename is interpolated into a shell command below).
    if not re_taint_free.match(filename):
        return "%s: tainted filename" % (filename)

    # Invoke gpgv on the file
    # /dev/null keyring guarantees a NO_PUBKEY result carrying the key id.
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
    (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)
        return internal_error

    if not keywords.has_key("NO_PUBKEY"):
        return "didn't find expected NO_PUBKEY in gpgv status-fd output"

    fingerprint = keywords["NO_PUBKEY"][0]
    # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
    # it'll try to create a lockfile in /dev. A better solution might
    # be a tempfile or something.
    cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
          % (Cnf["Dinstall::SigningKeyring"])
    cmd += " --keyring %s --keyserver %s --recv-key %s" \
           % (keyring, keyserver, fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
        return "'%s' failed with exit code %s" % (cmd, result)

################################################################################
def gpg_keyring_args(keyrings=None):
    # Build the "--keyring X --keyring Y ..." argument string; defaults to
    # the keyrings configured under Dinstall::GPGKeyring.
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    return " ".join(["--keyring %s" % x for x in keyrings])

################################################################################
1196 def check_signature (sig_filename, reject, data_filename="", keyrings=None, autofetch=None):
# NOTE(review): the embedded original line numbers skip values throughout this
# function, so the docstring quote markers, every 'return' statement and a few
# 'if'/'try' headers are not visible in this view -- confirm the control flow
# against the complete file before changing anything.
1198 Check the signature of a file and return the fingerprint if the
1199 signature is valid or 'None' if it's not. The first argument is the
1200 filename whose signature should be checked. The second argument is a
1201 reject function and is called when an error is found. The reject()
1202 function must allow for two arguments: the first is the error message,
1203 the second is an optional prefix string. It's possible for reject()
1204 to be called more than once during an invocation of check_signature().
1205 The third argument is optional and is the name of the files the
1206 detached signature applies to. The fourth argument is optional and is
1207 a *list* of keyrings to use. 'autofetch' can either be None, True or
1208 False. If None, the default behaviour specified in the config will be
1212 # Ensure the filename contains no shell meta-characters or other badness
1213 if not re_taint_free.match(sig_filename):
1214 reject("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
1217 if data_filename and not re_taint_free.match(data_filename):
1218 reject("!!WARNING!! tainted data filename: '%s'." % (data_filename))
# No keyrings supplied by the caller: fall back to the host's configured set
# (the guarding 'if not keyrings:' header appears elided from this view).
1222 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1224 # Autofetch the signing key if that's enabled
1225 if autofetch == None:
1226 autofetch = Cnf.get("Dinstall::KeyAutoFetch")
1228 error_msg = retrieve_key(sig_filename)
1233 # Build the command line
# gpgv emits machine-readable results on a dedicated status fd, which is
# read back through this pipe.
1234 status_read, status_write = os.pipe()
1235 cmd = "gpgv --status-fd %s %s %s %s" % (
1236 status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)
1238 # Invoke gpgv on the file
1239 (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)
1241 # Process the status-fd output
1242 (keywords, internal_error) = process_gpgv_output(status)
1244 # If we failed to parse the status-fd output, let's just whine and bail now
1246 reject("internal error while performing signature check on %s." % (sig_filename))
1247 reject(internal_error, "")
1248 reject("Please report the above errors to the Archive maintainers by replying to this mail.", "")
1252 # Now check for obviously bad things in the processed output
# Each fatal gpgv status token below is translated into a reject() call.
1253 if keywords.has_key("KEYREVOKED"):
1254 reject("The key used to sign %s has been revoked." % (sig_filename))
1256 if keywords.has_key("BADSIG"):
1257 reject("bad signature on %s." % (sig_filename))
1259 if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
1260 reject("failed to check signature on %s." % (sig_filename))
1262 if keywords.has_key("NO_PUBKEY"):
1263 args = keywords["NO_PUBKEY"]
1266 reject("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
1268 if keywords.has_key("BADARMOR"):
1269 reject("ASCII armour of signature was corrupt in %s." % (sig_filename))
1271 if keywords.has_key("NODATA"):
1272 reject("no signature found in %s." % (sig_filename))
1274 if keywords.has_key("EXPKEYSIG"):
1275 args = keywords["EXPKEYSIG"]
1278 reject("Signature made by expired key 0x%s" % (key))
1280 if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
1281 args = keywords["KEYEXPIRED"]
# The expiry timestamp is only reformatted when it looks like an epoch value
# (no 'T' in it); other forms fall through (branches partly elided here).
1285 if timestamp.count("T") == 0:
1287 expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
1289 expiredate = "unknown (%s)" % (timestamp)
1291 expiredate = timestamp
1292 reject("The key used to sign %s has expired on %s" % (sig_filename, expiredate))
1298 # Next check gpgv exited with a zero return code
# Include gpgv's raw status-fd and stderr/stdout output in the rejection to
# aid debugging (prefix_multi_line_string presumably tags each line).
1300 reject("gpgv failed while checking %s." % (sig_filename))
1302 reject(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
1304 reject(prefix_multi_line_string(output, " [GPG output:] "), "")
1307 # Sanity check the good stuff we expect
1308 if not keywords.has_key("VALIDSIG"):
1309 reject("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
# The fingerprint is the first field of the VALIDSIG status line.
1312 args = keywords["VALIDSIG"]
1314 reject("internal error while checking signature on %s." % (sig_filename))
1317 fingerprint = args[0]
1318 if not keywords.has_key("GOODSIG"):
1319 reject("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
1321 if not keywords.has_key("SIG_ID"):
1322 reject("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))
1325 # Finally ensure there's not something we don't recognise
# Whitelist of status tokens we understand; only the keys matter, the
# empty-string values are placeholders.
1326 known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
1327 SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
1328 NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")
1330 for keyword in keywords.keys():
1331 if not known_keywords.has_key(keyword):
1332 reject("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
1340 ################################################################################
1342 def gpg_get_key_addresses(fingerprint):
1343 """retrieve email addresses from gpg key uids for a given fingerprint"""
# Serve repeated lookups from the module-level key_uid_email_cache.
# NOTE(review): the early return, the construction of a fresh 'addresses'
# container, the exit-status check on 'result' and the final return appear to
# be elided from this view -- confirm against the complete file.
1344 addresses = key_uid_email_cache.get(fingerprint)
1345 if addresses != None:
# Shell out to gpg and scrape the uid lines of its output for addresses.
1348 cmd = "gpg --no-default-keyring %s --fingerprint %s" \
1349 % (gpg_keyring_args(), fingerprint)
1350 (result, output) = commands.getstatusoutput(cmd)
1352 for l in output.split('\n'):
1353 m = re_gpg_uid.match(l)
1355 addresses.add(m.group(1))
# Remember the result so the gpg invocation happens once per fingerprint.
1356 key_uid_email_cache[fingerprint] = addresses
1359 ################################################################################
1361 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
1363 def wrap(paragraph, max_length, prefix=""):
# Word-wraps 'paragraph' so that no emitted line exceeds 'max_length'
# characters, inserting 'prefix' at the start of continuation lines.
# NOTE(review): several lines are elided in this view (initialisation of the
# accumulator 's' and of 'line', the 'for word in ...' loop header, the else
# branches and the final return) -- verify against the complete file.
1367 words = paragraph.split()
1370 word_size = len(word)
# A single word longer than the limit is flushed onto a line of its own
# rather than being split mid-word.
1371 if word_size > max_length:
1373 s += line + '\n' + prefix
1374 s += word + '\n' + prefix
# The +1 accounts for the space that would separate the word from 'line'.
1377 new_length = len(line) + word_size + 1
1378 if new_length > max_length:
1379 s += line + '\n' + prefix
1392 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
    Returns fixed 'src'
    """
    # Drop the archive root from both paths (first occurrence only).
    stripped_src = src.replace(root, '', 1)
    stripped_dest = dest.replace(root, '', 1)
    # Climb out of every directory component the link itself lives in.
    link_dir = os.path.dirname(stripped_dest)
    ascent = '../' * len(link_dir.split('/'))
    return ascent + stripped_src
1405 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.
    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pair (fd, name).
    """
    # tempfile does the heavy lifting; the caller owns the open fd.
    return tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=directory)
1419 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Return a secure and unique directory by pre-creating it.
    If 'parent' is non-null, it will be the directory the directory is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pathname to the new directory
    """
    # Delegate to tempfile; the directory exists once this returns.
    return tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=parent)
1433 ################################################################################
1435 def is_email_alias(email):
1436 """ checks if the user part of the email is listed in the alias file """
# Lazily populate the module-level alias_cache on first use.
# NOTE(review): elided lines here presumably include a 'global alias_cache'
# declaration and the construction of the cache container, plus a guard for
# a missing alias file -- confirm against the complete file.
1438 if alias_cache == None:
1439 aliasfn = which_alias_file()
# The alias file uses 'name: target' lines; only the name part matters.
1442 for l in open(aliasfn):
1443 alias_cache.add(l.split(':')[0])
# Compare just the local part of the address against the cached alias names.
1444 uid = email.split('@')[0]
1445 return uid in alias_cache
1447 ################################################################################
1449 def get_changes_files(dir):
# NOTE: 'dir' shadows the builtin; kept as-is to preserve the interface.
1451 Takes a directory and lists all .changes files in it (as well as chdir'ing
1452 to the directory; this is due to broken behaviour on the part of p-u/p-a
1453 when you're not in the right place)
1455 Returns a list of filenames
# NOTE(review): the 'try:' / 'os.chdir(dir)' / 'except OSError' lines appear
# to be elided from this view -- 'e' below is presumably the caught OSError.
1458 # Much of the rest of p-u/p-a depends on being in the right place
1460 changes_files = [x for x in os.listdir(dir) if x.endswith('.changes')]
# fubar() presumably aborts with the given message on a failed listing.
1462 fubar("Failed to read list from directory %s (%s)" % (dir, e))
1464 return changes_files
1466 ################################################################################
# Import-time initialisation: load the dak configuration into the global
# 'Cnf' object used throughout this module.
1470 Cnf = apt_pkg.newConfiguration()
1471 apt_pkg.ReadConfigFileISC(Cnf,default_config)
# Layer the host-specific config file on top when it differs from the default.
1473 if which_conf_file() != default_config:
1474 apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1476 ###############################################################################