2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
41 import email as modemail
43 from dbconn import DBConn, get_architecture, get_component, get_suite
44 from dak_exceptions import *
45 from textutils import fix_maintainer
46 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
47 re_multi_line_field, re_srchasver, re_verwithext, \
48 re_parse_maintainer, re_taint_free, re_gpg_uid, re_re_mark, \
51 ################################################################################
default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used

alias_cache = None #: Cache for email alias checks
key_uid_email_cache = {} #: Cache for email addresses from gpg key uids

# Each entry is (hashname, hashing function, earliest .changes format
# version in which the checksum field is expected to be present).
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
                ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
63 ################################################################################
66 """ Escape html chars """
67 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
69 ################################################################################
def open_file(filename, mode='r'):
    """
    Open C{file}, return fileobject.

    @type filename: string
    @param filename: path/filename to open

    @type mode: string
    @param mode: open mode

    @rtype: fileobject
    @return: open fileobject

    @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
    """
    # open() is wrapped so callers only ever see the project-specific
    # CantOpenError (raised from the IOError handler) for unreadable paths.
    f = open(filename, mode)
    raise CantOpenError, filename
93 ################################################################################
def our_raw_input(prompt=""):
    # raw_input() replacement: write the prompt ourselves so it appears even
    # when stdout is not a terminal, then read one line from stdin.
    sys.stdout.write(prompt)
    # On EOF (^D) the user is told on stderr; presumably the caller then
    # aborts -- confirm against the elided handler.
    sys.stderr.write("\nUser interrupt (^D).\n")
106 ################################################################################
def extract_component_from_section(section):
    # Split a "component/section" Section value into (section, component).
    # A value containing "/" names the component explicitly.
    if section.find('/') != -1:
        component = section.split('/')[0]

    # Expand default component
    # Otherwise a per-section default component may come from the config.
    if Cnf.has_key("Component::%s" % section):

    return (section, component)
123 ################################################################################
def parse_deb822(contents, signing_rules=0):
    # Parse deb822-style control data (the text of a .changes/.dsc) into a
    # field -> value dict.  signing_rules follows parse_changes():
    # -1 = no signature needed, 0 = signature required, 1 = strict PGP framing.
    # Split the lines in the input, keeping the linebreaks.
    lines = contents.splitlines(True)
    # Empty input is an error, not an empty dict.
    raise ParseChangesError, "[Empty changes file]"

    # Reindex by line number so we can easily verify the format of
    indexed_lines[index] = line[:-1]
    num_of_lines = len(indexed_lines.keys())
    while index < num_of_lines:
        line = indexed_lines[index]
        if signing_rules == 1:
            # Strict mode: the signed data must be immediately followed by
            # the PGP SIGNATURE block, or the .dsc is rejected.
            if index > num_of_lines:
                raise InvalidDscError, index
            line = indexed_lines[index]
            if not line.startswith("-----BEGIN PGP SIGNATURE"):
                raise InvalidDscError, index
        if line.startswith("-----BEGIN PGP SIGNATURE"):
        if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
            if signing_rules == 1:
                # Skip the PGP armor header lines up to the blank line.
                while index < num_of_lines and line != "":
                    line = indexed_lines[index]
        # If we're not inside the signed data, don't process anything
        if signing_rules >= 0 and not inside_signature:
        # "Field: value" on a single line.
        slf = re_single_line_field.match(line)
            field = slf.groups()[0].lower()
            changes[field] = slf.groups()[1]
            changes[field] += '\n'
        # Continuation lines of a multi-line field.
        mlf = re_multi_line_field.match(line)
            raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
            if first == 1 and changes[field] != "":
                changes[field] += '\n'
            changes[field] += mlf.groups()[0] + '\n'
    if signing_rules == 1 and inside_signature:
        # Strict mode: signature block was opened but never closed.
        raise InvalidDscError, index
    # Preserve the raw text for later re-parsing (see ensure_hashes).
    changes["filecontents"] = "".join(lines)
    if changes.has_key("source"):
        # Strip the source version in brackets from the source field,
        # put it in the "source-version" field instead.
        srcver = re_srchasver.search(changes["source"])
        changes["source"] = srcver.group(1)
        changes["source-version"] = srcver.group(2)
    # No fields parsed at all -> error out.
    raise ParseChangesError, error
213 ################################################################################
def parse_changes(filename, signing_rules=0):
    """
    Parses a changes file and returns a dictionary where each field is a
    key. The mandatory first argument is the filename of the .changes
    file.

    signing_rules is an optional argument:

      - If signing_rules == -1, no signature is required.
      - If signing_rules == 0 (the default), a signature is required.
      - If signing_rules == 1, it turns on the same strict format checking
        as dpkg-source.

    The rules for (signing_rules == 1)-mode are:

      - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
        followed by any PGP header data and must end with a blank line.

      - The data section must end with a blank line and must be followed by
        "-----BEGIN PGP SIGNATURE-----".

    @raise ChangesUnicodeError: if the file content is not valid UTF-8.
    """
    changes_in = open_file(filename)
    content = changes_in.read()
    # Validate the encoding up front; the error is raised from the
    # UnicodeError handler so callers get one well-defined exception.
    unicode(content, 'utf-8')
    raise ChangesUnicodeError, "Changes file not proper utf-8"
    return parse_deb822(content, signing_rules)
246 ################################################################################
def hash_key(hashname):
    """Return the files-dict key used for a hash, e.g. "sha1" -> "sha1sum"."""
    return hashname + 'sum'
251 ################################################################################
def create_hash(where, files, hashname, hashfunc):
    """
    create_hash extends the passed files dict with the given hash by
    iterating over all files on disk and passing them to the hashing
    function.  Returns a list of reject messages.
    """
    for f in files.keys():
        file_handle = open_file(f)
        except CantOpenError:
            # Unreadable file: collect a reject message rather than abort.
            rejmsg.append("Could not open file %s for checksumming" % (f))
        # Store the checksum under e.g. "sha1sum" in the file's entry.
        files[f][hash_key(hashname)] = hashfunc(file_handle)
273 ################################################################################
def check_hash(where, files, hashname, hashfunc):
    """
    check_hash checks the given hash in the files dict against the actual
    files on disk. The hash values need to be present consistently in
    all file entries. It does not modify its input in any way.
    Returns a list of reject messages; 'where' names the manifest
    (".changes"/".dsc") for the messages.
    """
    for f in files.keys():
        file_handle = open_file(f)

        # Check for the hash entry, to not trigger a KeyError.
        if not files[f].has_key(hash_key(hashname)):
            rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,

        # Actually check the hash for correctness.
        if hashfunc(file_handle) != files[f][hash_key(hashname)]:
            rejmsg.append("%s: %s check failed in %s" % (f, hashname,
        except CantOpenError:
            # TODO: This happens when the file is in the pool.
            # warn("Cannot open file %s" % f)
308 ################################################################################
def check_size(where, files):
    """
    check_size checks the file sizes in the passed files dict against the
    sizes of the files on disk.  Returns a list of reject messages.
    """
    for f in files.keys():
        # TODO: This happens when the file is in the pool.
        actual_size = entry[stat.ST_SIZE]
        size = int(files[f]["size"])
        if size != actual_size:
            rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
                          % (f, actual_size, size, where))
333 ################################################################################
def check_hash_fields(what, manifest):
    """
    check_hash_fields ensures that there are no checksum fields in the
    given dict that we do not know about.  Returns a list of reject
    messages; 'what' names the manifest for the messages.
    """
    # Acceptable hash names come from the module-level known_hashes table.
    hashes = map(lambda x: x[0], known_hashes)
    for field in manifest:
        if field.startswith("checksums-"):
            hashname = field.split("-",1)[1]
            if hashname not in hashes:
                rejmsg.append("Unsupported checksum field for %s "\
                              "in %s" % (hashname, what))
351 ################################################################################
def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
    # Ensure the given checksum is available for every file: imported from
    # the .changes when the format is new enough, computed otherwise.
    if format >= version:
        # The version should contain the specified hash.
        # Import hashes from the changes
        rejmsg = parse_checksums(".changes", files, changes, hashname)
        # We need to calculate the hash because it can't possibly
        # be present in this older format.
        # NOTE(review): 'func' is presumably bound to create_hash or
        # check_hash in elided lines above -- confirm.
        return func(".changes", files, hashname, hashfunc)
368 # We could add the orig which might be in the pool to the files dict to
369 # access the checksums easily.
def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
    """
    ensure_dsc_hashes' task is to ensure that each and every *present* hash
    in the dsc is correct, i.e. identical to the changes file and if necessary
    the pool. The latter task is delegated to check_hash.
    Returns a list of reject messages.
    """
    # A hash that the .dsc simply does not carry is not an error here.
    if not dsc.has_key('Checksums-%s' % (hashname,)):
    # Import hashes from the dsc
    parse_checksums(".dsc", dsc_files, dsc, hashname)
    rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
387 ################################################################################
def ensure_hashes(changes, dsc, files, dsc_files):
    # Verify/compute every known checksum for the upload's files and,
    # for source uploads, its .dsc files.  Returns a list of reject messages.
    # Make sure we recognise the format of the Files: field in the .changes
    format = changes.get("format", "0.0").split(".", 1)
    format = int(format[0]), int(format[1])
    # Fallback when there is no minor version component.
    format = int(float(format[0])), 0

    # We need to deal with the original changes blob, as the fields we need
    # might not be in the changes dict serialised into the .dak anymore.
    orig_changes = parse_deb822(changes['filecontents'])

    # Copy the checksums over to the current changes dict. This will keep
    # the existing modifications to it intact.
    for field in orig_changes:
        if field.startswith('checksums-'):
            changes[field] = orig_changes[field]

    # Check for unsupported hashes
    rejmsg.extend(check_hash_fields(".changes", changes))
    rejmsg.extend(check_hash_fields(".dsc", dsc))

    # We have to calculate the hash if we have an earlier changes version than
    # the hash appears in rather than require it exist in the changes file
    for hashname, hashfunc, version in known_hashes:
        rejmsg.extend(_ensure_changes_hash(changes, format, version, files,
        # Only source uploads carry a .dsc whose hashes need checking.
        if "source" in changes["architecture"]:
            rejmsg.extend(_ensure_dsc_hash(dsc, dsc_files, hashname,
def parse_checksums(where, files, manifest, hashname):
    """
    Import the Checksums-<hashname> field of 'manifest' into 'files'.

    Each line of the field is "<checksum> <size> <filename>"; for every
    filename already present in 'files' with a matching size the checksum
    is stored under hash_key(hashname).  Afterwards every entry of 'files'
    must have received a checksum.

    @param where: name of the manifest (".changes"/".dsc") for messages
    @param files: files dict to extend (modified in place)
    @param manifest: parsed deb822 dict to read the field from
    @param hashname: e.g. "sha1", "sha256"
    @return: list of reject messages (empty on success)
    """
    rejmsg = []
    field = 'checksums-%s' % hashname
    if not field in manifest:
        return rejmsg
    for line in manifest[field].split('\n'):
        if not line:
            break
        checksum, size, checkfile = line.strip().split(' ')
        if not files.has_key(checkfile):
            # TODO: check for the file's entry in the original files dict, not
            # the one modified by (auto)byhand and other weird stuff
            # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
            #               (checkfile, hashname, where))
            continue
        # Note: sizes are compared as strings, as both come from field text.
        if not files[checkfile]["size"] == size:
            rejmsg.append("%s: size differs for files and checksums-%s entry "\
                "in %s" % (checkfile, hashname, where))
            continue
        files[checkfile][hash_key(hashname)] = checksum
    for f in files.keys():
        if not files[f].has_key(hash_key(hashname)):
            # Bug fix: report the file that is actually missing its entry
            # (f), not whichever name the parsing loop happened to see last
            # (checkfile).
            rejmsg.append("%s: no entry in checksums-%s in %s" % (f,
                hashname, where))
    return rejmsg
450 ################################################################################
452 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
    # Parse the Files:/Checksums-* field of a parsed .changes/.dsc into a
    # files dict keyed by filename; each entry carries size, section,
    # priority, component and the given hash.
    # Make sure we have a Files: field to parse...
    if not changes.has_key(field):
        raise NoFilesFieldError

    # Make sure we recognise the format of the Files: field
    format = re_verwithext.search(changes.get("format", "0.0"))
    raise UnknownFormatError, "%s" % (changes.get("format","0.0"))

    format = format.groups()
    if format[1] == None:
        # No minor part -> normalise to (major, 0, suffix).
        format = int(float(format[0])), 0, format[2]
        format = int(format[0]), int(format[1]), format[2]
    if format[2] == None:

    # format = (1,0) are the only formats we currently accept,
    # format = (0,0) are missing format headers of which we still
    # have some in the archive.
    if format != (1,0) and format != (0,0):
        raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
    # NOTE(review): this range check presumably belongs to the is_a_dsc
    # branch (dsc formats 1.5..1.8) -- structure elided in this view.
    if (format < (1,5) or format > (1,8)):
        raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
    if field != "files" and format < (1,8):
        raise UnknownFormatError, "%s" % (changes.get("format","0.0"))

    # Only .changes Files: lines carry section/priority columns.
    includes_section = (not is_a_dsc) and field == "files"

    # Parse each entry/line:
    for i in changes[field].split('\n'):
        section = priority = ""
        # 5-column form (with section/priority) vs 3-column form.
        (md5, size, section, priority, name) = s
        (md5, size, name) = s
        raise ParseChangesError, i
        (section, component) = extract_component_from_section(section)
        files[name] = Dict(size=size, section=section,
                           priority=priority, component=component)
        files[name][hashname] = md5
515 ################################################################################
def send_mail (message, filename=""):
    """sendmail wrapper, takes _either_ a message string or a file as arguments"""
    # If we've been passed a string dump it into a temporary file
    (fd, filename) = tempfile.mkstemp()
    os.write (fd, message)

    # Honour the optional recipient whitelist: any To/Bcc/Cc address not
    # matching it is stripped before the mail is handed to sendmail.
    if Cnf.has_key("Dinstall::MailWhiteList") and \
       Cnf["Dinstall::MailWhiteList"] != "":
        message_in = open_file(filename)
        message_raw = modemail.message_from_file(message_in)

        whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
        # Whitelist entries are regexes (lines with the RE marker) or
        # literal addresses; comment/blank lines are skipped.
        for line in whitelist_in:
            if not re_whitespace_comment.match(line):
                if re_re_mark.match(line):
                    whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
                    whitelist.append(re.compile(re.escape(line.strip())))

        # Walk the recipient headers and keep only whitelisted addresses.
        fields = ["To", "Bcc", "Cc"]
        value = message_raw.get(field, None)
        for item in value.split(","):
            (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
            if not mail_whitelisted:
                print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])

        # Doesn't have any mail in whitelist so remove the header
        del message_raw[field]
        message_raw.replace_header(field, string.join(match, ", "))

        # Change message fields in order if we don't have a To header
        if not message_raw.has_key("To"):
            if message_raw.has_key(field):
                message_raw[fields[-1]] = message_raw[field]
                del message_raw[field]
        # Clean up any temporary files
        # and return, as we removed all recipients.
        os.unlink (filename);

        # Rewrite the temp file with the filtered message.
        fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
        os.write (fd, message_raw.as_string(True));

    # Invoke sendmail
    (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
    raise SendmailFailedError, output

    # Clean up any temporary files
def poolify (source, component):
    # Return the pool subdirectory for a source package: "lib*" packages
    # are grouped under their four-character prefix (e.g. libf/libfoo/),
    # everything else under the first letter (e.g. f/foo/).
    if source[:3] == "lib":
        return component + source[:4] + '/' + source + '/'
    return component + source[:1] + '/' + source + '/'
607 ################################################################################
def move (src, dest, overwrite = 0, perms = 0664):
    # Move 'src' to 'dest', creating the destination directory (setgid,
    # 02775) if needed; refuses to clobber unless 'overwrite' is set.
    if os.path.exists(dest) and os.path.isdir(dest):
    dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        # Temporarily clear the umask so the mode given to makedirs sticks.
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Moving %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
        fubar("Can't move %s to %s - file already exists." % (src, dest))
        if not os.access(dest, os.W_OK):
            fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
    # Copy with metadata, then fix the mode; presumably src is unlinked
    # afterwards (elided in this view) -- confirm.
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
def copy (src, dest, overwrite = 0, perms = 0664):
    # Copy 'src' to 'dest' (same directory-creation and overwrite rules as
    # move(), but raises exceptions instead of calling fubar()).
    if os.path.exists(dest) and os.path.isdir(dest):
    dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        # Temporarily clear the umask so the mode given to makedirs sticks.
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Copying %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
        raise FileExistsError
        if not os.access(dest, os.W_OK):
            raise CantOverwriteError
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
654 ################################################################################
657 res = socket.gethostbyaddr(socket.gethostname())
658 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
659 if database_hostname:
660 return database_hostname
def which_conf_file ():
    """
    Return the dak config file to use on this host.

    Preference order: a per-user config ($HOME/etc/dak.conf) when
    Config::<host>::AllowLocalConfig is set, then a host-specific
    Config::<host>::DakConfig, then default_config.
    """
    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # Bug fix: os.path.join discards all earlier components when the
        # second argument is absolute, so joining with "/etc/dak.conf"
        # always probed /etc/dak.conf instead of $HOME/etc/dak.conf.
        confpath = os.path.join(homedir, "etc/dak.conf")
        if os.path.exists(confpath):
            # NOTE(review): this re-reads default_config rather than
            # confpath -- looks suspicious, but preserved as-is; confirm
            # before changing.
            apt_pkg.ReadConfigFileISC(Cnf,default_config)
            return confpath

    # We are still in here, so there is no local config file or we do
    # not allow local files. Do the normal stuff.
    if Cnf.get("Config::" + res[0] + "::DakConfig"):
        return Cnf["Config::" + res[0] + "::DakConfig"]

    return default_config
def which_apt_conf_file ():
    """
    Return the apt config file to use on this host.

    Preference order: host-specific Config::<host>::AptConfig, then
    default_apt_config; a local $HOME/etc/dak.conf is loaded first when
    Config::<host>::AllowLocalConfig is set.
    """
    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # Bug fix: os.path.join discards all earlier components when the
        # second argument is absolute ("/etc/dak.conf"), so homedir was
        # ignored; use a relative component to get $HOME/etc/dak.conf.
        confpath = os.path.join(homedir, "etc/dak.conf")
        if os.path.exists(confpath):
            apt_pkg.ReadConfigFileISC(Cnf,default_config)

    if Cnf.get("Config::" + res[0] + "::AptConfig"):
        return Cnf["Config::" + res[0] + "::AptConfig"]

    return default_apt_config
def which_alias_file():
    # Return this host's forward-alias file under /var/lib/misc if it
    # exists (presumably None otherwise -- the else branch is elided).
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
    if os.path.exists(aliasfn):
702 ################################################################################
def TemplateSubst(map, filename):
    """ Perform a substition of template """
    # Read the template file and replace every occurrence of each key of
    # 'map' with its value ('x' iterates the keys; loop line elided here).
    templatefile = open_file(filename)
    template = templatefile.read()
    template = template.replace(x,map[x])
713 ################################################################################
def fubar(msg, exit_code=1):
    # Fatal error: print to stderr with an "E:" prefix (and presumably
    # exit with exit_code -- the exit line is elided in this view).
    sys.stderr.write("E: %s\n" % (msg))
    # warn(): the non-fatal counterpart, "W:" prefix (its def line is
    # elided in this view).
    sys.stderr.write("W: %s\n" % (msg))
722 ################################################################################
724 # Returns the user name with a laughable attempt at rfc822 conformancy
725 # (read: removing stray periods).
727 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
729 ################################################################################
739 return ("%d%s" % (c, t))
741 ################################################################################
def cc_fix_changes (changes):
    # Canonicalise the "architecture" field in place: replace the
    # space-separated string with a dict mapping each architecture to 1
    # ('j' iterates the split names; the loop line is elided here).
    o = changes.get("architecture", "")
    del changes["architecture"]
    changes["architecture"] = {}
    changes["architecture"][j] = 1
def changes_compare (a, b):
    """ Sort by source name, source version, 'have source', and then by filename """
    # 'a' and 'b' are .changes filenames; parse failures are presumably
    # tolerated (handlers elided in this view).
    a_changes = parse_changes(a)
    b_changes = parse_changes(b)

    # Normalise the architecture fields so "source" membership is testable.
    cc_fix_changes (a_changes)
    cc_fix_changes (b_changes)

    # Sort by source name
    a_source = a_changes.get("source")
    b_source = b_changes.get("source")
    q = cmp (a_source, b_source)

    # Sort by source version
    a_version = a_changes.get("version", "0")
    b_version = b_changes.get("version", "0")
    q = apt_pkg.VersionCompare(a_version, b_version)

    # Sort by 'have source'
    a_has_source = a_changes["architecture"].get("source")
    b_has_source = b_changes["architecture"].get("source")
    if a_has_source and not b_has_source:
    elif b_has_source and not a_has_source:

    # Fall back to sort by filename
791 ################################################################################
def find_next_free (dest, too_many=100):
    # Find a free filename by appending ".0", ".1", ... to the original
    # 'dest'; raise NoFreeFilenameError after 'too_many' attempts.
    while os.path.exists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
    if extra >= too_many:
        raise NoFreeFilenameError
803 ################################################################################
def result_join (original, sep = '\t'):
    """
    Join the elements of 'original' with 'sep', rendering None as "".

    @type original: sequence of strings (or None)
    @param original: items to join; None entries become empty strings
    @type sep: string
    @param sep: separator, defaults to a tab
    @rtype: string
    @return: the joined string
    """
    # 'is None' rather than '== None': identity is the correct test for
    # the None singleton; a comprehension replaces the manual index loop.
    return sep.join([("" if i is None else i) for i in original])
814 ################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    # Prefix every line of 'str' with 'prefix', accumulating into 'out';
    # blank lines are dropped unless include_blank_lines is set.
    for line in str.split('\n'):
        if line or include_blank_lines:
            out += "%s%s\n" % (prefix, line)
    # Strip trailing new line
827 ################################################################################
def validate_changes_file_arg(filename, require_changes=1):
    """
    'filename' is either a .changes or .dak file. If 'filename' is a
    .dak file, it's changed to be the corresponding .changes file. The
    function then checks if the .changes file a) exists and b) is
    readable and returns the .changes filename if so. If there's a
    problem, the next action depends on the option 'require_changes'
    argument:

      - If 'require_changes' == -1, errors are ignored and the .changes
        filename is returned.
      - If 'require_changes' == 0, a warning is given and 'None' is returned.
      - If 'require_changes' == 1, a fatal error is raised.
    """
    orig_filename = filename
    if filename.endswith(".dak"):
        # Map the serialised .dak name back to its .changes counterpart.
        filename = filename[:-4]+".changes"

    if not filename.endswith(".changes"):
        error = "invalid file type; not a changes file"
    if not os.access(filename,os.R_OK):
        if os.path.exists(filename):
            error = "permission denied"
        error = "file not found"
    if require_changes == 1:
        fubar("%s: %s." % (orig_filename, error))
    elif require_changes == 0:
        warn("Skipping %s - %s" % (orig_filename, error))
    else: # We only care about the .dak file
870 ################################################################################
873 return (arch != "source" and arch != "all")
875 ################################################################################
def join_with_commas_and(list):
    """Return "nothing", the lone element, or "a, b and c" for a list."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    head = ", ".join(list[:-1])
    return head + " and " + list[-1]
882 ################################################################################
887 (pkg, version, constraint) = atom
889 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
892 pp_deps.append(pp_dep)
893 return " |".join(pp_deps)
895 ################################################################################
900 ################################################################################
def parse_args(Options):
    """ Handle -a, -c and -s arguments; returns them as SQL constraints """
    # XXX: This should go away and everything which calls it be converted
    # to use SQLA properly. For now, we'll just fix it not to use
    # the old Pg interface though
    session = DBConn().session()
    # Process suite: unknown names warn, an empty result is fatal.
    for suitename in split_args(Options["Suite"]):
        suite = get_suite(suitename, session=session)
        warn("suite '%s' not recognised." % (suitename))
        suite_ids_list.append(suite.suite_id)
    con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
    fubar("No valid suite given.")

    # Process component: same warn/fatal pattern as suites.
    if Options["Component"]:
        component_ids_list = []
        for componentname in split_args(Options["Component"]):
            component = get_component(componentname, session=session)
            if component is None:
                warn("component '%s' not recognised." % (componentname))
            component_ids_list.append(component.component_id)
        if component_ids_list:
            con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
        fubar("No valid component given.")

    # Process architecture
    con_architectures = ""
    if Options["Architecture"]:
        for archname in split_args(Options["Architecture"]):
            # "source" is not a real architecture row; it is tracked via
            # the separate check_source flag instead.
            if archname == "source":
            arch = get_architecture(archname, session=session)
            warn("architecture '%s' not recognised." % (archname))
            arch_ids_list.append(arch.arch_id)
        con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
        fubar("No valid architecture given.")

    return (con_suites, con_architectures, con_components, check_source)
964 ################################################################################
966 # Inspired(tm) by Bryn Keller's print_exc_plus (See
967 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
970 tb = sys.exc_info()[2]
979 traceback.print_exc()
981 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
982 frame.f_code.co_filename,
984 for key, value in frame.f_locals.items():
985 print "\t%20s = " % key,
989 print "<unable to print>"
991 ################################################################################
993 def try_with_debug(function):
1001 ################################################################################
def arch_compare_sw (a, b):
    """
    Function for use in sorting lists of architectures.

    Sorts normally except that 'source' dominates all others.
    """
    if a == "source" and b == "source":
def split_args (s, dwim=1):
    """
    Split command line arguments which can be separated by either commas
    or whitespace. If dwim is set, it will complain about string ending
    in comma since this usually means someone did 'dak ls -a i386, m68k
    foo' or something and the inevitable confusion resulting from 'm68k'
    being treated as an argument is undesirable.
    """
    # No comma at all -> whitespace-separated (return elided in this view).
    if s.find(",") == -1:
    if s[-1:] == "," and dwim:
        fubar("split_args: found trailing comma, spurious space maybe?")
1037 ################################################################################
def Dict(**kwargs):
    """Return the keyword arguments as a plain dictionary."""
    return kwargs
1041 ########################################
def gpgv_get_status_output(cmd, status_read, status_write):
    """
    Our very own version of commands.getouputstatus(), hacked to support
    gpgv's status fd.  Returns (output, status, exit_status).
    """
    cmd = ['/bin/sh', '-c', cmd]
    p2cread, p2cwrite = os.pipe()
    c2pread, c2pwrite = os.pipe()
    errout, errin = os.pipe()
    # Child: close every inherited fd except the status fd, then exec.
    for i in range(3, 256):
        if i != status_write:
    os.execvp(cmd[0], cmd)
    # Parent: multiplex the child's stdout/stderr and the status fd.
    os.dup2(c2pread, c2pwrite)
    os.dup2(errout, errin)
    output = status = ""
    i, o, e = select.select([c2pwrite, errin, status_read], [], [])
    r = os.read(fd, 8196)
    # An empty read means EOF on that fd; otherwise keep polling it.
    more_data.append(fd)
    if fd == c2pwrite or fd == errin:
    elif fd == status_read:
        fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
    # Reap the child and close our ends of the status pipe.
    pid, exit_status = os.waitpid(pid, 0)
    os.close(status_write)
    os.close(status_read)
    return output, status, exit_status
1108 ################################################################################
def process_gpgv_output(status):
    # Process the status-fd output
    # Turn gpgv --status-fd text into a {keyword: args} dict plus a string
    # collecting descriptions of any malformed lines.
    for line in status.split('\n'):
        split = line.split()
        internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
        # The listed keywords may legitimately appear more than once.
        if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
        keywords[keyword] = args
    return (keywords, internal_error)
1135 ################################################################################
def retrieve_key (filename, keyserver=None, keyring=None):
    """
    Retrieve the key that signed 'filename' from 'keyserver' and
    add it to 'keyring'. Returns nothing on success, or an error message
    on failure.
    """
    # Defaults for keyserver and keyring
    keyserver = Cnf["Dinstall::KeyServer"]
    keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(filename):
        return "%s: tainted filename" % (filename)

    # Invoke gpgv on the file
    status_read, status_write = os.pipe()
    # /dev/null keyring: we expect verification to fail with NO_PUBKEY,
    # which tells us the fingerprint to fetch.
    cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
    (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)
    return internal_error

    if not keywords.has_key("NO_PUBKEY"):
        return "didn't find expected NO_PUBKEY in gpgv status-fd output"

    fingerprint = keywords["NO_PUBKEY"][0]
    # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
    # it'll try to create a lockfile in /dev. A better solution might
    # be a tempfile or something.
    cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
          % (Cnf["Dinstall::SigningKeyring"])
    cmd += " --keyring %s --keyserver %s --recv-key %s" \
           % (keyring, keyserver, fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    return "'%s' failed with exit code %s" % (cmd, result)
1181 ################################################################################
def gpg_keyring_args(keyrings=None):
    # Build the "--keyring a --keyring b ..." argument string; when no
    # keyrings are given they default to Dinstall::GPGKeyring.
    keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
    return " ".join(["--keyring %s" % x for x in keyrings])
1189 ################################################################################
1191 def check_signature (sig_filename, reject, data_filename="", keyrings=None, autofetch=None):
# Verify the OpenPGP signature on sig_filename with gpgv, funnelling every
# problem through reject(); on success the signer's fingerprint is taken
# from the first VALIDSIG status argument.
1193 Check the signature of a file and return the fingerprint if the
1194 signature is valid or 'None' if it's not. The first argument is the
1195 filename whose signature should be checked. The second argument is a
1196 reject function and is called when an error is found. The reject()
1197 function must allow for two arguments: the first is the error message,
1198 the second is an optional prefix string. It's possible for reject()
1199 to be called more than once during an invocation of check_signature().
1200 The third argument is optional and is the name of the files the
1201 detached signature applies to. The fourth argument is optional and is
1202 a *list* of keyrings to use. 'autofetch' can either be None, True or
1203 False. If None, the default behaviour specified in the config will be
1207 # Ensure the filename contains no shell meta-characters or other badness
1208 if not re_taint_free.match(sig_filename):
1209 reject("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
# The detached-data filename is interpolated into a shell command below,
# so it gets the same taint check.
1212 if data_filename and not re_taint_free.match(data_filename):
1213 reject("!!WARNING!! tainted data filename: '%s'." % (data_filename))
# Default to the keyrings configured under Dinstall::GPGKeyring.
1217 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1219 # Autofetch the signing key if that's enabled
1220 if autofetch == None:
1221 autofetch = Cnf.get("Dinstall::KeyAutoFetch")
1223 error_msg = retrieve_key(sig_filename)
1228 # Build the command line
# gpgv writes machine-readable status lines to the write end of this pipe.
1229 status_read, status_write = os.pipe()
1230 cmd = "gpgv --status-fd %s %s %s %s" % (
1231 status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)
1233 # Invoke gpgv on the file
1234 (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)
1236 # Process the status-fd output
# keywords maps each gpgv status token (e.g. VALIDSIG) to its arguments.
1237 (keywords, internal_error) = process_gpgv_output(status)
1239 # If we failed to parse the status-fd output, let's just whine and bail now
1241 reject("internal error while performing signature check on %s." % (sig_filename))
1242 reject(internal_error, "")
1243 reject("Please report the above errors to the Archive maintainers by replying to this mail.", "")
1247 # Now check for obviously bad things in the processed output
1248 if keywords.has_key("KEYREVOKED"):
1249 reject("The key used to sign %s has been revoked." % (sig_filename))
1251 if keywords.has_key("BADSIG"):
1252 reject("bad signature on %s." % (sig_filename))
# ERRSIG without NO_PUBKEY means verification itself failed, not merely
# that we lack the key.
1254 if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
1255 reject("failed to check signature on %s." % (sig_filename))
1257 if keywords.has_key("NO_PUBKEY"):
1258 args = keywords["NO_PUBKEY"]
# NOTE(review): 'key' is presumably extracted from args (gpgv's NO_PUBKEY
# status carries the missing long keyid) -- extraction lines not shown here.
1261 reject("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
1263 if keywords.has_key("BADARMOR"):
1264 reject("ASCII armour of signature was corrupt in %s." % (sig_filename))
1266 if keywords.has_key("NODATA"):
1267 reject("no signature found in %s." % (sig_filename))
1269 if keywords.has_key("EXPKEYSIG"):
1270 args = keywords["EXPKEYSIG"]
1273 reject("Signature made by expired key 0x%s" % (key))
# KEYEXPIRED alone is only a problem when the signature is not otherwise good.
1275 if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
1276 args = keywords["KEYEXPIRED"]
# The expiry timestamp may be epoch seconds (no 'T') which we render as
# YYYY-MM-DD; ISO-style timestamps appear to be passed through as-is.
1280 if timestamp.count("T") == 0:
1282 expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
1284 expiredate = "unknown (%s)" % (timestamp)
1286 expiredate = timestamp
1287 reject("The key used to sign %s has expired on %s" % (sig_filename, expiredate))
1293 # Next check gpgv exited with a zero return code
1295 reject("gpgv failed while checking %s." % (sig_filename))
# Prefer the status-fd output for diagnostics; fall back to stderr/stdout.
1297 reject(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
1299 reject(prefix_multi_line_string(output, " [GPG output:] "), "")
1302 # Sanity check the good stuff we expect
1303 if not keywords.has_key("VALIDSIG"):
1304 reject("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
# The first VALIDSIG argument is the signer's fingerprint.
1307 args = keywords["VALIDSIG"]
1309 reject("internal error while checking signature on %s." % (sig_filename))
1312 fingerprint = args[0]
1313 if not keywords.has_key("GOODSIG"):
1314 reject("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
1316 if not keywords.has_key("SIG_ID"):
1317 reject("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))
1320 # Finally ensure there's not something we don't recognise
1321 known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
1322 SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
1323 NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")
1325 for keyword in keywords.keys():
1326 if not known_keywords.has_key(keyword):
1327 reject("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
1335 ################################################################################
1337 def gpg_get_key_addresses(fingerprint):
1338 """Retrieve email addresses from gpg key uids for a given fingerprint."""
# Serve repeated lookups from the module-level key_uid_email_cache.
1339 addresses = key_uid_email_cache.get(fingerprint)
1340 if addresses != None:
# Cache miss: shell out to gpg against the configured keyrings and scrape
# the uid lines for mail addresses.
1343 cmd = "gpg --no-default-keyring %s --fingerprint %s" \
1344 % (gpg_keyring_args(), fingerprint)
1345 (result, output) = commands.getstatusoutput(cmd)
1347 for l in output.split('\n'):
# re_gpg_uid's first group captures the address portion of a uid line.
1348 m = re_gpg_uid.match(l)
1350 addresses.add(m.group(1))
# Remember the result so subsequent calls skip the gpg invocation.
1351 key_uid_email_cache[fingerprint] = addresses
1354 ################################################################################
1356 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
1358 def wrap(paragraph, max_length, prefix=""):
# Word-wrap 'paragraph' so no line exceeds 'max_length' columns, prepending
# 'prefix' to each wrapped line. Splits on whitespace only.
1362 words = paragraph.split()
1365 word_size = len(word)
# A single word longer than the limit is flushed onto a line of its own.
1366 if word_size > max_length:
1368 s += line + '\n' + prefix
1369 s += word + '\n' + prefix
# +1 accounts for the space that would join the word to the current line.
1372 new_length = len(line) + word_size + 1
1373 if new_length > max_length:
1374 s += line + '\n' + prefix
1387 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
    Returns the fixed 'src' path.
    """
    # Drop the leading archive root from both endpoints.
    relative_src = src.replace(root, '', 1)
    link_dir = os.path.dirname(dest.replace(root, '', 1))
    # One '../' hop for every directory level the link sits below the root.
    hops = len(link_dir.split('/'))
    return ('../' * hops) + relative_src
1400 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.

    If 'directory' is non-null, the file is pre-created inside it.
    If 'prefix' is non-null, the filename starts with it (default "dak").
    If 'suffix' is non-null, the filename ends with it.

    Returns a pair (fd, name), as produced by tempfile.mkstemp.
    """
    # Delegate to the standard library's secure temp-file helper.
    return tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=directory)
1414 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Return a secure and unique directory by pre-creating it.

    If 'parent' is non-null, the directory is pre-created inside it.
    If 'prefix' is non-null, the directory name starts with it (default "dak").
    If 'suffix' is non-null, the directory name ends with it.

    Returns the pathname of the new directory.
    """
    # Delegate to the standard library's secure temp-directory helper.
    return tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=parent)
1428 ################################################################################
1430 def is_email_alias(email):
1431 """ checks if the user part of the email is listed in the alias file """
# Lazily populate the module-level alias_cache on first use by reading the
# system alias file located by which_alias_file().
1433 if alias_cache == None:
1434 aliasfn = which_alias_file()
# Alias file lines look like "name: target"; only the name is cached.
1437 for l in open(aliasfn):
1438 alias_cache.add(l.split(':')[0])
# Compare the local part (everything before '@') against the cached names.
1439 uid = email.split('@')[0]
1440 return uid in alias_cache
1442 ################################################################################
1444 def get_changes_files(dir):
1446 Takes a directory and lists all .changes files in it (as well as chdir'ing
1447 to the directory; this is due to broken behaviour on the part of p-u/p-a
1448 when you're not in the right place)
1450 Returns a list of filenames
1453 # Much of the rest of p-u/p-a depends on being in the right place
# Collect only the .changes files; ordering is whatever os.listdir returns.
1455 changes_files = [x for x in os.listdir(dir) if x.endswith('.changes')]
# Any OS error (missing directory, bad permissions) is fatal for these tools.
1457 fubar("Failed to read list from directory %s (%s)" % (dir, e))
1459 return changes_files
1461 ################################################################################
# Module-level config bootstrap: build the global apt_pkg configuration,
# load the host defaults from default_config, then layer any site-specific
# configuration file on top if one is in use.
1465 Cnf = apt_pkg.newConfiguration()
1466 apt_pkg.ReadConfigFileISC(Cnf,default_config)
# Only re-read when which_conf_file() points somewhere other than the default.
1468 if which_conf_file() != default_config:
1469 apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1471 ###############################################################################