2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
41 import email as modemail
43 from dbconn import DBConn, get_architecture, get_component, get_suite
44 from dak_exceptions import *
45 from textutils import fix_maintainer
46 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
47 re_multi_line_field, re_srchasver, re_verwithext, \
48 re_parse_maintainer, re_taint_free, re_gpg_uid, re_re_mark, \
51 ################################################################################
53 default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
54 default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used
56 alias_cache = None #: Cache for email alias checks
57 key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
59 # (hashname, function, earliest_changes_version)
60 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
61 ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
63 ################################################################################
66 """ Escape html chars """
67 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
69 ################################################################################
71 def open_file(filename, mode='r'):
73 Open C{file}, return fileobject.
75 @type filename: string
76 @param filename: path/filename to open
79 @param mode: open mode
82 @return: open fileobject
84 @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
88 f = open(filename, mode)
90 raise CantOpenError, filename
93 ################################################################################
95 def our_raw_input(prompt=""):
97 sys.stdout.write(prompt)
103 sys.stderr.write("\nUser interrupt (^D).\n")
106 ################################################################################
108 def extract_component_from_section(section):
111 if section.find('/') != -1:
112 component = section.split('/')[0]
114 # Expand default component
116 if Cnf.has_key("Component::%s" % section):
121 return (section, component)
123 ################################################################################
125 def parse_deb822(contents, signing_rules=0):
129 # Split the lines in the input, keeping the linebreaks.
130 lines = contents.splitlines(True)
133 raise ParseChangesError, "[Empty changes file]"
135 # Reindex by line number so we can easily verify the format of
141 indexed_lines[index] = line[:-1]
145 num_of_lines = len(indexed_lines.keys())
148 while index < num_of_lines:
150 line = indexed_lines[index]
152 if signing_rules == 1:
154 if index > num_of_lines:
155 raise InvalidDscError, index
156 line = indexed_lines[index]
157 if not line.startswith("-----BEGIN PGP SIGNATURE"):
158 raise InvalidDscError, index
163 if line.startswith("-----BEGIN PGP SIGNATURE"):
165 if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
167 if signing_rules == 1:
168 while index < num_of_lines and line != "":
170 line = indexed_lines[index]
172 # If we're not inside the signed data, don't process anything
173 if signing_rules >= 0 and not inside_signature:
175 slf = re_single_line_field.match(line)
177 field = slf.groups()[0].lower()
178 changes[field] = slf.groups()[1]
182 changes[field] += '\n'
184 mlf = re_multi_line_field.match(line)
187 raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
188 if first == 1 and changes[field] != "":
189 changes[field] += '\n'
191 changes[field] += mlf.groups()[0] + '\n'
195 if signing_rules == 1 and inside_signature:
196 raise InvalidDscError, index
198 changes["filecontents"] = "".join(lines)
200 if changes.has_key("source"):
201 # Strip the source version in brackets from the source field,
202 # put it in the "source-version" field instead.
203 srcver = re_srchasver.search(changes["source"])
205 changes["source"] = srcver.group(1)
206 changes["source-version"] = srcver.group(2)
209 raise ParseChangesError, error
213 ################################################################################
215 def parse_changes(filename, signing_rules=0):
217 Parses a changes file and returns a dictionary where each field is a
218 key. The mandatory first argument is the filename of the .changes
221 signing_rules is an optional argument:
223 - If signing_rules == -1, no signature is required.
224 - If signing_rules == 0 (the default), a signature is required.
225 - If signing_rules == 1, it turns on the same strict format checking
228 The rules for (signing_rules == 1)-mode are:
230 - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
231 followed by any PGP header data and must end with a blank line.
233 - The data section must end with a blank line and must be followed by
234 "-----BEGIN PGP SIGNATURE-----".
237 changes_in = open_file(filename)
238 content = changes_in.read()
241 unicode(content, 'utf-8')
243 raise ChangesUnicodeError, "Changes file not proper utf-8"
244 return parse_deb822(content, signing_rules)
246 ################################################################################
def hash_key(hashname):
    """Return the key under which the named hash is stored in a files dict.

    E.g. "md5" -> "md5sum", "sha256" -> "sha256sum".
    """
    return "{0}sum".format(hashname)
251 ################################################################################
253 def create_hash(where, files, hashname, hashfunc):
255 create_hash extends the passed files dict with the given hash by
256 iterating over all files on disk and passing them to the hashing
261 for f in files.keys():
263 file_handle = open_file(f)
264 except CantOpenError:
265 rejmsg.append("Could not open file %s for checksumming" % (f))
268 files[f][hash_key(hashname)] = hashfunc(file_handle)
273 ################################################################################
275 def check_hash(where, files, hashname, hashfunc):
277 check_hash checks the given hash in the files dict against the actual
278 files on disk. The hash values need to be present consistently in
279 all file entries. It does not modify its input in any way.
283 for f in files.keys():
287 file_handle = open_file(f)
289 # Check for the hash entry, to not trigger a KeyError.
290 if not files[f].has_key(hash_key(hashname)):
291 rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
295 # Actually check the hash for correctness.
296 if hashfunc(file_handle) != files[f][hash_key(hashname)]:
297 rejmsg.append("%s: %s check failed in %s" % (f, hashname,
299 except CantOpenError:
300 # TODO: This happens when the file is in the pool.
301 # warn("Cannot open file %s" % f)
308 ################################################################################
310 def check_size(where, files):
312 check_size checks the file sizes in the passed files dict against the
317 for f in files.keys():
322 # TODO: This happens when the file is in the pool.
326 actual_size = entry[stat.ST_SIZE]
327 size = int(files[f]["size"])
328 if size != actual_size:
329 rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
330 % (f, actual_size, size, where))
333 ################################################################################
335 def check_hash_fields(what, manifest):
337 check_hash_fields ensures that there are no checksum fields in the
338 given dict that we do not know about.
342 hashes = map(lambda x: x[0], known_hashes)
343 for field in manifest:
344 if field.startswith("checksums-"):
345 hashname = field.split("-",1)[1]
346 if hashname not in hashes:
347 rejmsg.append("Unsupported checksum field for %s "\
348 "in %s" % (hashname, what))
351 ################################################################################
353 def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
354 if format >= version:
355 # The version should contain the specified hash.
358 # Import hashes from the changes
359 rejmsg = parse_checksums(".changes", files, changes, hashname)
363 # We need to calculate the hash because it can't possibly
366 return func(".changes", files, hashname, hashfunc)
368 # We could add the orig which might be in the pool to the files dict to
369 # access the checksums easily.
371 def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
373 ensure_dsc_hashes' task is to ensure that each and every *present* hash
374 in the dsc is correct, i.e. identical to the changes file and if necessary
375 the pool. The latter task is delegated to check_hash.
379 if not dsc.has_key('Checksums-%s' % (hashname,)):
381 # Import hashes from the dsc
382 parse_checksums(".dsc", dsc_files, dsc, hashname)
384 rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
387 ################################################################################
389 def parse_checksums(where, files, manifest, hashname):
391 field = 'checksums-%s' % hashname
392 if not field in manifest:
394 for line in manifest[field].split('\n'):
397 clist = line.strip().split(' ')
399 checksum, size, checkfile = clist
401 rejmsg.append("Cannot parse checksum line [%s]" % (line))
403 if not files.has_key(checkfile):
404 # TODO: check for the file's entry in the original files dict, not
405 # the one modified by (auto)byhand and other weird stuff
406 # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
407 # (file, hashname, where))
409 if not files[checkfile]["size"] == size:
410 rejmsg.append("%s: size differs for files and checksums-%s entry "\
411 "in %s" % (checkfile, hashname, where))
413 files[checkfile][hash_key(hashname)] = checksum
414 for f in files.keys():
415 if not files[f].has_key(hash_key(hashname)):
416 rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
420 ################################################################################
422 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
424 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
427 # Make sure we have a Files: field to parse...
428 if not changes.has_key(field):
429 raise NoFilesFieldError
431 # Make sure we recognise the format of the Files: field
432 format = re_verwithext.search(changes.get("format", "0.0"))
434 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
436 format = format.groups()
437 if format[1] == None:
438 format = int(float(format[0])), 0, format[2]
440 format = int(format[0]), int(format[1]), format[2]
441 if format[2] == None:
445 # format = (1,0) are the only formats we currently accept,
446 # format = (0,0) are missing format headers of which we still
447 # have some in the archive.
448 if format != (1,0) and format != (0,0):
449 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
451 if (format < (1,5) or format > (1,8)):
452 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
453 if field != "files" and format < (1,8):
454 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
456 includes_section = (not is_a_dsc) and field == "files"
458 # Parse each entry/line:
459 for i in changes[field].split('\n'):
463 section = priority = ""
466 (md5, size, section, priority, name) = s
468 (md5, size, name) = s
470 raise ParseChangesError, i
477 (section, component) = extract_component_from_section(section)
479 files[name] = Dict(size=size, section=section,
480 priority=priority, component=component)
481 files[name][hashname] = md5
485 ################################################################################
487 def send_mail (message, filename=""):
488 """sendmail wrapper, takes _either_ a message string or a file as arguments"""
490 # If we've been passed a string dump it into a temporary file
492 (fd, filename) = tempfile.mkstemp()
493 os.write (fd, message)
496 if Cnf.has_key("Dinstall::MailWhiteList") and \
497 Cnf["Dinstall::MailWhiteList"] != "":
498 message_in = open_file(filename)
499 message_raw = modemail.message_from_file(message_in)
503 whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
505 for line in whitelist_in:
506 if not re_whitespace_comment.match(line):
507 if re_re_mark.match(line):
508 whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
510 whitelist.append(re.compile(re.escape(line.strip())))
515 fields = ["To", "Bcc", "Cc"]
518 value = message_raw.get(field, None)
521 for item in value.split(","):
522 (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
528 if not mail_whitelisted:
529 print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])
533 # Doesn't have any mail in whitelist so remove the header
535 del message_raw[field]
537 message_raw.replace_header(field, string.join(match, ", "))
539 # Change message fields in order if we don't have a To header
540 if not message_raw.has_key("To"):
543 if message_raw.has_key(field):
544 message_raw[fields[-1]] = message_raw[field]
545 del message_raw[field]
548 # Clean up any temporary files
549 # and return, as we removed all recipients.
551 os.unlink (filename);
554 fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
555 os.write (fd, message_raw.as_string(True));
559 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
561 raise SendmailFailedError, output
563 # Clean up any temporary files
567 ################################################################################
569 def poolify (source, component):
572 if source[:3] == "lib":
573 return component + source[:4] + '/' + source + '/'
575 return component + source[:1] + '/' + source + '/'
577 ################################################################################
579 def move (src, dest, overwrite = 0, perms = 0664):
580 if os.path.exists(dest) and os.path.isdir(dest):
583 dest_dir = os.path.dirname(dest)
584 if not os.path.exists(dest_dir):
585 umask = os.umask(00000)
586 os.makedirs(dest_dir, 02775)
588 #print "Moving %s to %s..." % (src, dest)
589 if os.path.exists(dest) and os.path.isdir(dest):
590 dest += '/' + os.path.basename(src)
591 # Don't overwrite unless forced to
592 if os.path.exists(dest):
594 fubar("Can't move %s to %s - file already exists." % (src, dest))
596 if not os.access(dest, os.W_OK):
597 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
598 shutil.copy2(src, dest)
599 os.chmod(dest, perms)
602 def copy (src, dest, overwrite = 0, perms = 0664):
603 if os.path.exists(dest) and os.path.isdir(dest):
606 dest_dir = os.path.dirname(dest)
607 if not os.path.exists(dest_dir):
608 umask = os.umask(00000)
609 os.makedirs(dest_dir, 02775)
611 #print "Copying %s to %s..." % (src, dest)
612 if os.path.exists(dest) and os.path.isdir(dest):
613 dest += '/' + os.path.basename(src)
614 # Don't overwrite unless forced to
615 if os.path.exists(dest):
617 raise FileExistsError
619 if not os.access(dest, os.W_OK):
620 raise CantOverwriteError
621 shutil.copy2(src, dest)
622 os.chmod(dest, perms)
624 ################################################################################
627 res = socket.gethostbyaddr(socket.gethostname())
628 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
629 if database_hostname:
630 return database_hostname
634 def which_conf_file ():
635 res = socket.gethostbyaddr(socket.gethostname())
636 # In case we allow local config files per user, try if one exists
637 if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
638 homedir = os.getenv("HOME")
639 confpath = os.path.join(homedir, "/etc/dak.conf")
640 if os.path.exists(confpath):
641 apt_pkg.ReadConfigFileISC(Cnf,default_config)
643 # We are still in here, so there is no local config file or we do
644 # not allow local files. Do the normal stuff.
645 if Cnf.get("Config::" + res[0] + "::DakConfig"):
646 return Cnf["Config::" + res[0] + "::DakConfig"]
648 return default_config
650 def which_apt_conf_file ():
651 res = socket.gethostbyaddr(socket.gethostname())
652 # In case we allow local config files per user, try if one exists
653 if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
654 homedir = os.getenv("HOME")
655 confpath = os.path.join(homedir, "/etc/dak.conf")
656 if os.path.exists(confpath):
657 apt_pkg.ReadConfigFileISC(Cnf,default_config)
659 if Cnf.get("Config::" + res[0] + "::AptConfig"):
660 return Cnf["Config::" + res[0] + "::AptConfig"]
662 return default_apt_config
664 def which_alias_file():
665 hostname = socket.gethostbyaddr(socket.gethostname())[0]
666 aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
667 if os.path.exists(aliasfn):
672 ################################################################################
674 def TemplateSubst(map, filename):
675 """ Perform a substition of template """
676 templatefile = open_file(filename)
677 template = templatefile.read()
679 template = template.replace(x, str(map[x]))
683 ################################################################################
685 def fubar(msg, exit_code=1):
686 sys.stderr.write("E: %s\n" % (msg))
690 sys.stderr.write("W: %s\n" % (msg))
692 ################################################################################
694 # Returns the user name with a laughable attempt at rfc822 conformancy
695 # (read: removing stray periods).
697 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
700 return pwd.getpwuid(os.getuid())[0]
702 ################################################################################
712 return ("%d%s" % (c, t))
714 ################################################################################
716 def cc_fix_changes (changes):
717 o = changes.get("architecture", "")
719 del changes["architecture"]
720 changes["architecture"] = {}
722 changes["architecture"][j] = 1
724 def changes_compare (a, b):
725 """ Sort by source name, source version, 'have source', and then by filename """
727 a_changes = parse_changes(a)
732 b_changes = parse_changes(b)
736 cc_fix_changes (a_changes)
737 cc_fix_changes (b_changes)
739 # Sort by source name
740 a_source = a_changes.get("source")
741 b_source = b_changes.get("source")
742 q = cmp (a_source, b_source)
746 # Sort by source version
747 a_version = a_changes.get("version", "0")
748 b_version = b_changes.get("version", "0")
749 q = apt_pkg.VersionCompare(a_version, b_version)
753 # Sort by 'have source'
754 a_has_source = a_changes["architecture"].get("source")
755 b_has_source = b_changes["architecture"].get("source")
756 if a_has_source and not b_has_source:
758 elif b_has_source and not a_has_source:
761 # Fall back to sort by filename
764 ################################################################################
766 def find_next_free (dest, too_many=100):
769 while os.path.exists(dest) and extra < too_many:
770 dest = orig_dest + '.' + repr(extra)
772 if extra >= too_many:
773 raise NoFreeFilenameError
776 ################################################################################
778 def result_join (original, sep = '\t'):
780 for i in xrange(len(original)):
781 if original[i] == None:
782 resultlist.append("")
784 resultlist.append(original[i])
785 return sep.join(resultlist)
787 ################################################################################
789 def prefix_multi_line_string(str, prefix, include_blank_lines=0):
791 for line in str.split('\n'):
793 if line or include_blank_lines:
794 out += "%s%s\n" % (prefix, line)
795 # Strip trailing new line
800 ################################################################################
802 def validate_changes_file_arg(filename, require_changes=1):
804 'filename' is either a .changes or .dak file. If 'filename' is a
805 .dak file, it's changed to be the corresponding .changes file. The
806 function then checks if the .changes file a) exists and b) is
807 readable and returns the .changes filename if so. If there's a
808 problem, the next action depends on the option 'require_changes'
811 - If 'require_changes' == -1, errors are ignored and the .changes
812 filename is returned.
813 - If 'require_changes' == 0, a warning is given and 'None' is returned.
814 - If 'require_changes' == 1, a fatal error is raised.
819 orig_filename = filename
820 if filename.endswith(".dak"):
821 filename = filename[:-4]+".changes"
823 if not filename.endswith(".changes"):
824 error = "invalid file type; not a changes file"
826 if not os.access(filename,os.R_OK):
827 if os.path.exists(filename):
828 error = "permission denied"
830 error = "file not found"
833 if require_changes == 1:
834 fubar("%s: %s." % (orig_filename, error))
835 elif require_changes == 0:
836 warn("Skipping %s - %s" % (orig_filename, error))
838 else: # We only care about the .dak file
843 ################################################################################
846 return (arch != "source" and arch != "all")
848 ################################################################################
def join_with_commas_and(list):
    """Render a sequence as English prose: "a, b and c".

    Returns "nothing" for an empty sequence and the sole element for a
    one-element sequence.  (The parameter name shadows the ``list``
    builtin, but is kept for interface compatibility with callers.)
    """
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    head = ", ".join(list[:-1])
    return "%s and %s" % (head, list[-1])
855 ################################################################################
860 (pkg, version, constraint) = atom
862 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
865 pp_deps.append(pp_dep)
866 return " |".join(pp_deps)
868 ################################################################################
873 ################################################################################
875 def parse_args(Options):
876 """ Handle -a, -c and -s arguments; returns them as SQL constraints """
877 # XXX: This should go away and everything which calls it be converted
878 # to use SQLA properly. For now, we'll just fix it not to use
879 # the old Pg interface though
880 session = DBConn().session()
884 for suitename in split_args(Options["Suite"]):
885 suite = get_suite(suitename, session=session)
886 if suite.suite_id is None:
887 warn("suite '%s' not recognised." % (suite.suite_name))
889 suite_ids_list.append(suite.suite_id)
891 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
893 fubar("No valid suite given.")
898 if Options["Component"]:
899 component_ids_list = []
900 for componentname in split_args(Options["Component"]):
901 component = get_component(componentname, session=session)
902 if component is None:
903 warn("component '%s' not recognised." % (componentname))
905 component_ids_list.append(component.component_id)
906 if component_ids_list:
907 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
909 fubar("No valid component given.")
913 # Process architecture
914 con_architectures = ""
916 if Options["Architecture"]:
918 for archname in split_args(Options["Architecture"]):
919 if archname == "source":
922 arch = get_architecture(archname, session=session)
924 warn("architecture '%s' not recognised." % (archname))
926 arch_ids_list.append(arch.arch_id)
928 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
931 fubar("No valid architecture given.")
935 return (con_suites, con_architectures, con_components, check_source)
937 ################################################################################
939 # Inspired(tm) by Bryn Keller's print_exc_plus (See
940 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
943 tb = sys.exc_info()[2]
952 traceback.print_exc()
954 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
955 frame.f_code.co_filename,
957 for key, value in frame.f_locals.items():
958 print "\t%20s = " % key,
962 print "<unable to print>"
964 ################################################################################
966 def try_with_debug(function):
974 ################################################################################
976 def arch_compare_sw (a, b):
978 Function for use in sorting lists of architectures.
980 Sorts normally except that 'source' dominates all others.
983 if a == "source" and b == "source":
992 ################################################################################
994 def split_args (s, dwim=1):
996 Split command line arguments which can be separated by either commas
997 or whitespace. If dwim is set, it will complain about string ending
998 in comma since this usually means someone did 'dak ls -a i386, m68k
999 foo' or something and the inevitable confusion resulting from 'm68k'
1000 being treated as an argument is undesirable.
1003 if s.find(",") == -1:
1006 if s[-1:] == "," and dwim:
1007 fubar("split_args: found trailing comma, spurious space maybe?")
1010 ################################################################################
1012 def Dict(**dict): return dict
1014 ########################################
1016 def gpgv_get_status_output(cmd, status_read, status_write):
1018 Our very own version of commands.getstatusoutput(), hacked to support
1022 cmd = ['/bin/sh', '-c', cmd]
1023 p2cread, p2cwrite = os.pipe()
1024 c2pread, c2pwrite = os.pipe()
1025 errout, errin = os.pipe()
1035 for i in range(3, 256):
1036 if i != status_write:
1042 os.execvp(cmd[0], cmd)
1048 os.dup2(c2pread, c2pwrite)
1049 os.dup2(errout, errin)
1051 output = status = ""
1053 i, o, e = select.select([c2pwrite, errin, status_read], [], [])
1056 r = os.read(fd, 8196)
1058 more_data.append(fd)
1059 if fd == c2pwrite or fd == errin:
1061 elif fd == status_read:
1064 fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
1066 pid, exit_status = os.waitpid(pid, 0)
1068 os.close(status_write)
1069 os.close(status_read)
1079 return output, status, exit_status
1081 ################################################################################
1083 def process_gpgv_output(status):
1084 # Process the status-fd output
1087 for line in status.split('\n'):
1091 split = line.split()
1093 internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
1095 (gnupg, keyword) = split[:2]
1096 if gnupg != "[GNUPG:]":
1097 internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
1100 if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
1101 internal_error += "found duplicate status token ('%s').\n" % (keyword)
1104 keywords[keyword] = args
1106 return (keywords, internal_error)
1108 ################################################################################
1110 def retrieve_key (filename, keyserver=None, keyring=None):
1112 Retrieve the key that signed 'filename' from 'keyserver' and
1113 add it to 'keyring'. Returns nothing on success, or an error message
1117 # Defaults for keyserver and keyring
1119 keyserver = Cnf["Dinstall::KeyServer"]
1121 keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
1123 # Ensure the filename contains no shell meta-characters or other badness
1124 if not re_taint_free.match(filename):
1125 return "%s: tainted filename" % (filename)
1127 # Invoke gpgv on the file
1128 status_read, status_write = os.pipe()
1129 cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1130 (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1132 # Process the status-fd output
1133 (keywords, internal_error) = process_gpgv_output(status)
1135 return internal_error
1137 if not keywords.has_key("NO_PUBKEY"):
1138 return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1140 fingerprint = keywords["NO_PUBKEY"][0]
1141 # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
1142 # it'll try to create a lockfile in /dev. A better solution might
1143 # be a tempfile or something.
1144 cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1145 % (Cnf["Dinstall::SigningKeyring"])
1146 cmd += " --keyring %s --keyserver %s --recv-key %s" \
1147 % (keyring, keyserver, fingerprint)
1148 (result, output) = commands.getstatusoutput(cmd)
1150 return "'%s' failed with exit code %s" % (cmd, result)
1154 ################################################################################
1156 def gpg_keyring_args(keyrings=None):
1158 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1160 return " ".join(["--keyring %s" % x for x in keyrings])
1162 ################################################################################
1164 def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None):
1166 Check the signature of a file and return the fingerprint if the
1167 signature is valid or 'None' if it's not. The first argument is the
1168 filename whose signature should be checked. The second argument is a
1169 reject function and is called when an error is found. The reject()
1170 function must allow for two arguments: the first is the error message,
1171 the second is an optional prefix string. It's possible for reject()
1172 to be called more than once during an invocation of check_signature().
1173 The third argument is optional and is the name of the files the
1174 detached signature applies to. The fourth argument is optional and is
1175 a *list* of keyrings to use. 'autofetch' can either be None, True or
1176 False. If None, the default behaviour specified in the config will be
1182 # Ensure the filename contains no shell meta-characters or other badness
1183 if not re_taint_free.match(sig_filename):
1184 rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
1185 return (None, rejects)
1187 if data_filename and not re_taint_free.match(data_filename):
1188 rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
1189 return (None, rejects)
1192 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1194 # Autofetch the signing key if that's enabled
1195 if autofetch == None:
1196 autofetch = Cnf.get("Dinstall::KeyAutoFetch")
1198 error_msg = retrieve_key(sig_filename)
1200 rejects.append(error_msg)
1201 return (None, rejects)
1203 # Build the command line
1204 status_read, status_write = os.pipe()
1205 cmd = "gpgv --status-fd %s %s %s %s" % (
1206 status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)
1208 # Invoke gpgv on the file
1209 (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)
1211 # Process the status-fd output
1212 (keywords, internal_error) = process_gpgv_output(status)
1214 # If we failed to parse the status-fd output, let's just whine and bail now
1216 rejects.append("internal error while performing signature check on %s." % (sig_filename))
1217 rejects.append(internal_error, "")
1218 rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.", "")
1219 return (None, rejects)
1221 # Now check for obviously bad things in the processed output
1222 if keywords.has_key("KEYREVOKED"):
1223 rejects.append("The key used to sign %s has been revoked." % (sig_filename))
1224 if keywords.has_key("BADSIG"):
1225 rejects.append("bad signature on %s." % (sig_filename))
1226 if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
1227 rejects.append("failed to check signature on %s." % (sig_filename))
1228 if keywords.has_key("NO_PUBKEY"):
1229 args = keywords["NO_PUBKEY"]
1232 rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
1233 if keywords.has_key("BADARMOR"):
1234 rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
1235 if keywords.has_key("NODATA"):
1236 rejects.append("no signature found in %s." % (sig_filename))
1237 if keywords.has_key("EXPKEYSIG"):
1238 args = keywords["EXPKEYSIG"]
1241 rejects.append("Signature made by expired key 0x%s" % (key))
1242 if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
1243 args = keywords["KEYEXPIRED"]
1247 if timestamp.count("T") == 0:
1249 expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
1251 expiredate = "unknown (%s)" % (timestamp)
1253 expiredate = timestamp
1254 rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))
1256 if len(rejects) > 0:
1257 return (None, rejects)
1259 # Next check gpgv exited with a zero return code
1261 rejects.append("gpgv failed while checking %s." % (sig_filename))
1263 rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
1265 rejects.append(prefix_multi_line_string(output, " [GPG output:] "), "")
1266 return (None, rejects)
1268 # Sanity check the good stuff we expect
1269 if not keywords.has_key("VALIDSIG"):
1270 rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
1272 args = keywords["VALIDSIG"]
1274 rejects.append("internal error while checking signature on %s." % (sig_filename))
1276 fingerprint = args[0]
1277 if not keywords.has_key("GOODSIG"):
1278 rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
1279 if not keywords.has_key("SIG_ID"):
1280 rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))
1282 # Finally ensure there's not something we don't recognise
1283 known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
1284 SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
1285 NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")
1287 for keyword in keywords.keys():
1288 if not known_keywords.has_key(keyword):
1289 rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
1291 if len(rejects) > 0:
1292 return (None, rejects)
1294 return (fingerprint, [])
1296 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """
    Retrieve email addresses from gpg key uids for a given fingerprint.

    Results are memoised in the module-level key_uid_email_cache, so the
    external gpg invocation happens at most once per fingerprint.
    """
    # Serve repeat lookups from the module-level cache.
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses != None:
        # Cache hit -- NOTE(review): the early 'return addresses' on this
        # branch is elided in this excerpt.
    # Cache miss: ask gpg to print the key and scrape the uid lines.
    # NOTE(review): cmd is built by plain string interpolation; this assumes
    # 'fingerprint' is shell-safe -- confirm callers only pass values taken
    # from verified gpgv output.
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
          % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    for l in output.split('\n'):
        # re_gpg_uid captures the email address on matching "uid" lines.
        m = re_gpg_uid.match(l)
        addresses.add(m.group(1))
    # Remember the answer for future calls with the same fingerprint.
    key_uid_email_cache[fingerprint] = addresses
1315 ################################################################################
1317 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """
    Greedy word-wrap of 'paragraph' to at most 'max_length' columns,
    prepending 'prefix' to continuation lines.

    NOTE(review): parts of this function (the per-word loop header, the
    initialisation of 's' and 'line', and the final return) are elided in
    this excerpt; the comments below describe only the visible logic.
    """
    words = paragraph.split()
    # ('word', 'line' and 's' are bound by elided lines above this point.)
    word_size = len(word)
    if word_size > max_length:
        # A single word longer than the limit: flush the current line,
        # then emit the oversized word on a line of its own.
        s += line + '\n' + prefix
        s += word + '\n' + prefix
    # +1 accounts for the separating space if the word joins this line.
    new_length = len(line) + word_size + 1
    if new_length > max_length:
        # The word does not fit: flush the current line and start a new one.
        s += line + '\n' + prefix
1348 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
    Returns the fixed-up 'src' path.
    """
    # Strip the leading root prefix from both paths (first occurrence only).
    rel_src = src.replace(root, '', 1)
    link_dir = os.path.dirname(dest.replace(root, '', 1))
    # One "../" hop for every path component of the link's directory.
    hops = len(link_dir.split('/'))
    return ('../' * hops) + rel_src
1361 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.

    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pair (fd, name).
    """
    # Delegate to tempfile, which creates the file atomically (O_EXCL,
    # mode 0600) so no other process can race us for the name.
    (fd, pathname) = tempfile.mkstemp(suffix, prefix, directory)
    return (fd, pathname)
1375 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Return a secure and unique directory by pre-creating it.

    If 'parent' is non-null, it will be the directory the directory is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pathname to the new directory.
    """
    # tempfile.mkdtemp creates the directory with mode 0700 (owner-only)
    # and guarantees the name is unique.
    dirname = tempfile.mkdtemp(suffix, prefix, parent)
    return dirname
1389 ################################################################################
def is_email_alias(email):
    """ checks if the user part of the email is listed in the alias file """
    # Lazily populate the module-level alias_cache on first use.
    # NOTE(review): the 'global alias_cache' declaration and the
    # 'alias_cache = set()' initialisation are elided in this excerpt.
    if alias_cache == None:
        aliasfn = which_alias_file()
        # One alias per line, "name: target" -- keep only the name part.
        # NOTE(review): the file handle from open() is never explicitly
        # closed here.
        for l in open(aliasfn):
            alias_cache.add(l.split(':')[0])
    # Compare only the local part (before '@') of the address.
    uid = email.split('@')[0]
    return uid in alias_cache
1403 ################################################################################
def get_changes_files(dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place)

    Returns a list of filenames
    """
    # Much of the rest of p-u/p-a depends on being in the right place.
    # NOTE(review): the os.chdir(dir) call and the enclosing try/except
    # are elided in this excerpt.
    changes_files = [x for x in os.listdir(dir) if x.endswith('.changes')]
    # Error path: abort via fubar; 'e' is bound by an elided except clause.
    fubar("Failed to read list from directory %s (%s)" % (dir, e))
    return changes_files
1422 ################################################################################
# Module import side effect: build the global apt_pkg configuration object
# and load dak's default configuration into it.
Cnf = apt_pkg.newConfiguration()
apt_pkg.ReadConfigFileISC(Cnf,default_config)

# Allow a host-specific configuration file (resolved by which_conf_file())
# to layer its settings on top of the defaults.
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1432 ###############################################################################