2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
41 import email as modemail
43 from dbconn import DBConn, get_architecture, get_component, get_suite
44 from dak_exceptions import *
45 from textutils import fix_maintainer
46 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
47 re_multi_line_field, re_srchasver, re_verwithext, \
48 re_parse_maintainer, re_taint_free, re_gpg_uid, re_re_mark, \
51 ################################################################################
default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used
alias_cache = None #: Cache for email alias checks
key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
# Each entry is (hashname, function, earliest_changes_version); the version is
# the first .changes Format in which the hash field can appear (see
# _ensure_changes_hash, which computes the hash for older formats instead).
# (hashname, function, earliest_changes_version)
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
                ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
63 ################################################################################
    """ Escape html chars """
    # Replace every character matched by re_html_escaping with its entity
    # from the html_escaping table.
    return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
69 ################################################################################
def open_file(filename, mode='r'):
    """
    Open C{file}, return fileobject.

    @type filename: string
    @param filename: path/filename to open

    @type mode: string
    @param mode: open mode

    @rtype: fileobject
    @return: open fileobject

    @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
    """
        f = open(filename, mode)
        raise CantOpenError, filename
93 ################################################################################
def our_raw_input(prompt=""):
    # raw_input() wrapper: write the prompt to stdout; an EOF (^D) from the
    # user is reported on stderr.
        sys.stdout.write(prompt)
        sys.stderr.write("\nUser interrupt (^D).\n")
106 ################################################################################
def extract_component_from_section(section):
    # Sections may be qualified as "component/section"; the component is the
    # part before the slash.
    if section.find('/') != -1:
        component = section.split('/')[0]

    # Expand default component
        # NOTE(review): for unqualified sections this consults
        # Cnf "Component::<section>" — the branch bodies are not shown here.
        if Cnf.has_key("Component::%s" % section):
    return (section, component)
123 ################################################################################
def parse_deb822(contents, signing_rules=0):
    # Parse deb822-style "Field: value" content into a dict keyed by
    # lower-cased field name; understands inline PGP signatures according to
    # signing_rules (see parse_changes for the meaning of the values).
    # Split the lines in the input, keeping the linebreaks.
    lines = contents.splitlines(True)
        raise ParseChangesError, "[Empty changes file]"
    # Reindex by line number so we can easily verify the format of
    # any .dsc file...
        indexed_lines[index] = line[:-1]
    num_of_lines = len(indexed_lines.keys())
    while index < num_of_lines:
        line = indexed_lines[index]
            # Strict mode: a blank line must be followed immediately by the
            # PGP signature block, otherwise the .dsc is malformed.
            if signing_rules == 1:
                if index > num_of_lines:
                    raise InvalidDscError, index
                line = indexed_lines[index]
                if not line.startswith("-----BEGIN PGP SIGNATURE"):
                    raise InvalidDscError, index
        if line.startswith("-----BEGIN PGP SIGNATURE"):
        if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
            # Strict mode: skip the PGP header data up to the blank line.
            if signing_rules == 1:
                while index < num_of_lines and line != "":
                    line = indexed_lines[index]
        # If we're not inside the signed data, don't process anything
        if signing_rules >= 0 and not inside_signature:
        # "Field: value" on a single line.
        slf = re_single_line_field.match(line)
            field = slf.groups()[0].lower()
            changes[field] = slf.groups()[1]
            changes[field] += '\n'
        # Continuation line of a multi-line field.
        mlf = re_multi_line_field.match(line)
                raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
            if first == 1 and changes[field] != "":
                changes[field] += '\n'
            changes[field] += mlf.groups()[0] + '\n'
    if signing_rules == 1 and inside_signature:
        raise InvalidDscError, index
    # Keep the raw text around as well.
    changes["filecontents"] = "".join(lines)
    if changes.has_key("source"):
        # Strip the source version in brackets from the source field,
        # put it in the "source-version" field instead.
        srcver = re_srchasver.search(changes["source"])
            changes["source"] = srcver.group(1)
            changes["source-version"] = srcver.group(2)
        raise ParseChangesError, error
213 ################################################################################
def parse_changes(filename, signing_rules=0):
    """
    Parses a changes file and returns a dictionary where each field is a
    key. The mandatory first argument is the filename of the .changes
    file.

    signing_rules is an optional argument:

      - If signing_rules == -1, no signature is required.
      - If signing_rules == 0 (the default), a signature is required.
      - If signing_rules == 1, it turns on the same strict format checking
        as dpkg-source.

    The rules for (signing_rules == 1)-mode are:

      - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
        followed by any PGP header data and must end with a blank line.

      - The data section must end with a blank line and must be followed by
        "-----BEGIN PGP SIGNATURE-----".
    """
    changes_in = open_file(filename)
    content = changes_in.read()
    # Reject files that are not valid UTF-8 before parsing.
        unicode(content, 'utf-8')
        raise ChangesUnicodeError, "Changes file not proper utf-8"
    return parse_deb822(content, signing_rules)
246 ################################################################################
def hash_key(hashname):
    """Return the files-dict key a hash is stored under, e.g. "md5" -> "md5sum"."""
    return hashname + 'sum'
251 ################################################################################
def create_hash(where, files, hashname, hashfunc):
    """
    create_hash extends the passed files dict with the given hash by
    iterating over all files on disk and passing them to the hashing
    function given.
    """
    for f in files.keys():
            file_handle = open_file(f)
        except CantOpenError:
            rejmsg.append("Could not open file %s for checksumming" % (f))
        # Store the digest under the canonical key, e.g. "sha1" -> "sha1sum".
        files[f][hash_key(hashname)] = hashfunc(file_handle)
def check_hash(where, files, hashname, hashfunc):
    """
    check_hash checks the given hash in the files dict against the actual
    files on disk. The hash values need to be present consistently in
    all file entries. It does not modify its input in any way.
    """
    for f in files.keys():
                file_handle = open_file(f)
                # Check for the hash entry, to not trigger a KeyError.
                if not files[f].has_key(hash_key(hashname)):
                    rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
                # Actually check the hash for correctness.
                if hashfunc(file_handle) != files[f][hash_key(hashname)]:
                    rejmsg.append("%s: %s check failed in %s" % (f, hashname,
            except CantOpenError:
                # TODO: This happens when the file is in the pool.
                # warn("Cannot open file %s" % f)
def check_size(where, files):
    """
    check_size checks the file sizes in the passed files dict against the
    files on disk.
    """
    for f in files.keys():
                # TODO: This happens when the file is in the pool.
        actual_size = entry[stat.ST_SIZE]
        size = int(files[f]["size"])
        if size != actual_size:
            rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
                          % (f, actual_size, size, where))
def check_hash_fields(what, manifest):
    """
    check_hash_fields ensures that there are no checksum fields in the
    given dict that we do not know about.
    """
    # Allowed hash names come from the module-level known_hashes table.
    hashes = map(lambda x: x[0], known_hashes)
    for field in manifest:
        if field.startswith("checksums-"):
            hashname = field.split("-",1)[1]
            if hashname not in hashes:
                rejmsg.append("Unsupported checksum field for %s "\
                    "in %s" % (hashname, what))
351 ################################################################################
def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
    # Verify one hash type for the .changes file list; .changes formats older
    # than `version` cannot carry the checksum field, so it is computed instead.
    if format >= version:
        # The version should contain the specified hash.
        # Import hashes from the changes
        rejmsg = parse_checksums(".changes", files, changes, hashname)
        # We need to calculate the hash because it can't possibly
        # be in the file.
    return func(".changes", files, hashname, hashfunc)
# We could add the orig which might be in the pool to the files dict to
# access the checksums easily.
def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
    """
    ensure_dsc_hashes' task is to ensure that each and every *present* hash
    in the dsc is correct, i.e. identical to the changes file and if necessary
    the pool. The latter task is delegated to check_hash.
    """
    # A missing Checksums-<hash> field is fine: only *present* hashes are checked.
    if not dsc.has_key('Checksums-%s' % (hashname,)):
    # Import hashes from the dsc
    parse_checksums(".dsc", dsc_files, dsc, hashname)
    rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
387 ################################################################################
def ensure_hashes(changes, dsc, files, dsc_files):
    # Make sure we recognise the format of the Files: field in the .changes
    format = changes.get("format", "0.0").split(".", 1)
        format = int(format[0]), int(format[1])
        # A bare major version ("1") has no minor component; default it to 0.
        format = int(float(format[0])), 0
    # We need to deal with the original changes blob, as the fields we need
    # might not be in the changes dict serialised into the .dak anymore.
    orig_changes = parse_deb822(changes['filecontents'])
    # Copy the checksums over to the current changes dict. This will keep
    # the existing modifications to it intact.
    for field in orig_changes:
        if field.startswith('checksums-'):
            changes[field] = orig_changes[field]
    # Check for unsupported hashes
    rejmsg.extend(check_hash_fields(".changes", changes))
    rejmsg.extend(check_hash_fields(".dsc", dsc))
    # We have to calculate the hash if we have an earlier changes version than
    # the hash appears in rather than require it exist in the changes file
    for hashname, hashfunc, version in known_hashes:
        rejmsg.extend(_ensure_changes_hash(changes, format, version, files,
        # Source uploads additionally carry a .dsc whose hashes must agree.
        if "source" in changes["architecture"]:
            rejmsg.extend(_ensure_dsc_hash(dsc, dsc_files, hashname,
def parse_checksums(where, files, manifest, hashname):
    # Parse the "Checksums-<hash>" field (lines of "checksum size filename")
    # and record each checksum into the corresponding files[] entry.
    field = 'checksums-%s' % hashname
    if not field in manifest:
    for line in manifest[field].split('\n'):
        checksum, size, checkfile = line.strip().split(' ')
        if not files.has_key(checkfile):
            # TODO: check for the file's entry in the original files dict, not
            # the one modified by (auto)byhand and other weird stuff
            #            rejmsg.append("%s: not present in files but in checksums-%s in %s" %
            #                (file, hashname, where))
        if not files[checkfile]["size"] == size:
            rejmsg.append("%s: size differs for files and checksums-%s entry "\
                "in %s" % (checkfile, hashname, where))
        files[checkfile][hash_key(hashname)] = checksum
    for f in files.keys():
        if not files[f].has_key(hash_key(hashname)):
            # NOTE(review): this message interpolates `checkfile` — the last
            # line parsed in the loop above — where it almost certainly means
            # `f`, the file actually missing the checksum entry.
            rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
450 ################################################################################
# Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
    # Parse the Files: (or Checksums-*) field into a dict keyed by filename;
    # non-dsc "files" entries also carry section/priority/component.
    # Make sure we have a Files: field to parse...
    if not changes.has_key(field):
        raise NoFilesFieldError
    # Make sure we recognise the format of the Files: field
    format = re_verwithext.search(changes.get("format", "0.0"))
        raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
    format = format.groups()
    if format[1] == None:
        # No minor component in the Format header; default it to 0.
        format = int(float(format[0])), 0, format[2]
        format = int(format[0]), int(format[1]), format[2]
    if format[2] == None:
        # format = (1,0) are the only formats we currently accept,
        # format = (0,0) are missing format headers of which we still
        # have some in the archive.
        if format != (1,0) and format != (0,0):
            raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
        # Only formats 1.5 through 1.8 are accepted on this branch.
        if (format < (1,5) or format > (1,8)):
            raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
        # Checksums-* fields only exist from format 1.8 onwards.
        if field != "files" and format < (1,8):
            raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
    includes_section = (not is_a_dsc) and field == "files"
    # Parse each entry/line:
    for i in changes[field].split('\n'):
        section = priority = ""
                (md5, size, section, priority, name) = s
                (md5, size, name) = s
            raise ParseChangesError, i
        (section, component) = extract_component_from_section(section)
        files[name] = Dict(size=size, section=section,
                           priority=priority, component=component)
        files[name][hashname] = md5
515 ################################################################################
def send_mail (message, filename=""):
    """sendmail wrapper, takes _either_ a message string or a file as arguments"""
    # If we've been passed a string dump it into a temporary file
        (fd, filename) = tempfile.mkstemp()
        os.write (fd, message)
    # Optionally restrict recipients to the configured whitelist.
    if Cnf.has_key("Dinstall::MailWhiteList") and \
           Cnf["Dinstall::MailWhiteList"] != "":
        message_in = open_file(filename)
        message_raw = modemail.message_from_file(message_in)
        # Build the whitelist: lines matching re_re_mark are compiled as
        # regexps (marker stripped); all other lines match literally.
        whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
            for line in whitelist_in:
                if not re_whitespace_comment.match(line):
                    if re_re_mark.match(line):
                        whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
                        whitelist.append(re.compile(re.escape(line.strip())))
        # Filter each recipient header against the whitelist.
        fields = ["To", "Bcc", "Cc"]
            # Check each field
            value = message_raw.get(field, None)
                for item in value.split(","):
                    (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
                    if not mail_whitelisted:
                        print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])
                # Doesn't have any mail in whitelist so remove the header
                    del message_raw[field]
                    message_raw.replace_header(field, string.join(match, ", "))
        # Change message fields in order if we don't have a To header
        if not message_raw.has_key("To"):
                if message_raw.has_key(field):
                    message_raw[fields[-1]] = message_raw[field]
                    del message_raw[field]
                # Clean up any temporary files
                # and return, as we removed all recipients.
                    os.unlink (filename);
        # Rewrite the (possibly filtered) message back to the file.
        fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
        os.write (fd, message_raw.as_string(True));
    # Hand the file to the configured sendmail command.
    (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
        raise SendmailFailedError, output
    # Clean up any temporary files
def poolify (source, component):
    # Map a source package to its pool subdirectory: "lib*" packages pool
    # under their first four letters, everything else under the first letter.
    if source[:3] == "lib":
        return component + source[:4] + '/' + source + '/'
        return component + source[:1] + '/' + source + '/'
607 ################################################################################
def move (src, dest, overwrite = 0, perms = 0664):
    # Determine the destination directory (dest may itself be a directory).
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        # Create missing parents group-writable regardless of our umask.
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Moving %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
            fubar("Can't move %s to %s - file already exists." % (src, dest))
            if not os.access(dest, os.W_OK):
                fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
    # Copy preserving metadata, then apply the requested permissions.
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
def copy (src, dest, overwrite = 0, perms = 0664):
    # Like move(), but raises FileExistsError/CantOverwriteError on conflicts
    # instead of calling fubar().
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        # Create missing parents group-writable regardless of our umask.
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Copying %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
            raise FileExistsError
            if not os.access(dest, os.W_OK):
                raise CantOverwriteError
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
654 ################################################################################
    res = socket.gethostbyaddr(socket.gethostname())
    # Prefer an explicitly configured database hostname for this host.
    database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
    if database_hostname:
        return database_hostname
def which_conf_file ():
    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # NOTE(review): os.path.join() discards 'homedir' because the second
        # component is absolute — confpath is always "/etc/dak.conf".
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            # NOTE(review): this reads default_config, not confpath — verify
            # whether the local config file was meant to be read here.
            apt_pkg.ReadConfigFileISC(Cnf,default_config)
    # We are still in here, so there is no local config file or we do
    # not allow local files. Do the normal stuff.
    if Cnf.get("Config::" + res[0] + "::DakConfig"):
        return Cnf["Config::" + res[0] + "::DakConfig"]
        return default_config
def which_apt_conf_file ():
    res = socket.gethostbyaddr(socket.gethostname())
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # NOTE(review): os.path.join() discards 'homedir' because the second
        # component is absolute — confpath is always "/etc/dak.conf".
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            # NOTE(review): reads default_config, not confpath — verify intent.
            apt_pkg.ReadConfigFileISC(Cnf,default_config)
    if Cnf.get("Config::" + res[0] + "::AptConfig"):
        return Cnf["Config::" + res[0] + "::AptConfig"]
        return default_apt_config
def which_alias_file():
    hostname = socket.gethostbyaddr(socket.gethostname())[0]
    aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
    # Only report the per-host forward-alias file if it actually exists.
    if os.path.exists(aliasfn):
702 ################################################################################
# Escape characters which have meaning to SQL's regex comparison operator ('~')
# (woefully incomplete)
    # NOTE(review): double backslashes — presumably one level is consumed by
    # SQL string-literal parsing before the regex engine sees it; confirm.
    s = s.replace('+', '\\\\+')
    s = s.replace('.', '\\\\.')
712 ################################################################################
def TemplateSubst(map, filename):
    """ Perform a substitution of template """
    templatefile = open_file(filename)
    template = templatefile.read()
    # Replace each key of `map` appearing in the template with its value.
        template = template.replace(x,map[x])
723 ################################################################################
def fubar(msg, exit_code=1):
    # Report a fatal error on stderr; exit_code is the intended process
    # exit status.
    sys.stderr.write("E: %s\n" % (msg))
    # warn(): print a non-fatal warning to stderr.
    sys.stderr.write("W: %s\n" % (msg))
732 ################################################################################
# Returns the user name with a laughable attempt at rfc822 conformancy
# (read: removing stray periods).
    # GECOS field (index 4), first comma-separated component, periods stripped.
    return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
739 ################################################################################
749 return ("%d%s" % (c, t))
751 ################################################################################
def cc_fix_changes (changes):
    # Normalise the architecture field from a string into a set-like
    # dict of {arch: 1}.
    o = changes.get("architecture", "")
        del changes["architecture"]
    changes["architecture"] = {}
        changes["architecture"][j] = 1
def changes_compare (a, b):
    """ Sort by source name, source version, 'have source', and then by filename """
    # cmp()-style comparator over two .changes filenames.
        a_changes = parse_changes(a)
        b_changes = parse_changes(b)
    cc_fix_changes (a_changes)
    cc_fix_changes (b_changes)
    # Sort by source name
    a_source = a_changes.get("source")
    b_source = b_changes.get("source")
    q = cmp (a_source, b_source)
    # Sort by source version
    a_version = a_changes.get("version", "0")
    b_version = b_changes.get("version", "0")
    q = apt_pkg.VersionCompare(a_version, b_version)
    # Sort by 'have source'
    a_has_source = a_changes["architecture"].get("source")
    b_has_source = b_changes["architecture"].get("source")
    if a_has_source and not b_has_source:
    elif b_has_source and not a_has_source:
    # Fall back to sort by filename
801 ################################################################################
def find_next_free (dest, too_many=100):
    # Find a free filename by appending ".0", ".1", ... to dest; raise
    # NoFreeFilenameError after too_many attempts.
    while os.path.exists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
    if extra >= too_many:
        raise NoFreeFilenameError
813 ################################################################################
def result_join (original, sep = '\t'):
    # Join a result row with `sep`, rendering None entries as "".
    for i in xrange(len(original)):
        if original[i] == None:
            resultlist.append("")
            resultlist.append(original[i])
    return sep.join(resultlist)
824 ################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    # Prepend `prefix` to every line of `str`; blank lines are dropped
    # unless include_blank_lines is set.
    for line in str.split('\n'):
        if line or include_blank_lines:
            out += "%s%s\n" % (prefix, line)
    # Strip trailing new line
def validate_changes_file_arg(filename, require_changes=1):
    """
    'filename' is either a .changes or .dak file. If 'filename' is a
    .dak file, it's changed to be the corresponding .changes file. The
    function then checks if the .changes file a) exists and b) is
    readable and returns the .changes filename if so. If there's a
    problem, the next action depends on the option 'require_changes'
    argument:

      - If 'require_changes' == -1, errors are ignored and the .changes
        filename is returned.
      - If 'require_changes' == 0, a warning is given and 'None' is returned.
      - If 'require_changes' == 1, a fatal error is raised.
    """
    orig_filename = filename
    if filename.endswith(".dak"):
        filename = filename[:-4]+".changes"
    if not filename.endswith(".changes"):
        error = "invalid file type; not a changes file"
        if not os.access(filename,os.R_OK):
            if os.path.exists(filename):
                error = "permission denied"
                error = "file not found"
        if require_changes == 1:
            fubar("%s: %s." % (orig_filename, error))
        elif require_changes == 0:
            warn("Skipping %s - %s" % (orig_filename, error))
        else: # We only care about the .dak file
    # True for concrete architectures only ("source" and "all" are not real).
    return (arch != "source" and arch != "all")
885 ################################################################################
def join_with_commas_and(list):
    """Render a list as English prose: "nothing", "a", "a and b", "a, b and c"."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    head = ", ".join(list[:-1])
    return "%s and %s" % (head, list[-1])
892 ################################################################################
        # Each dependency atom is a (package, version, constraint) tuple.
        (pkg, version, constraint) = atom
            pp_dep = "%s (%s %s)" % (pkg, constraint, version)
        pp_deps.append(pp_dep)
    return " |".join(pp_deps)
905 ################################################################################
910 ################################################################################
def parse_args(Options):
    """ Handle -a, -c and -s arguments; returns them as SQL constraints """
    # XXX: This should go away and everything which calls it be converted
    #      to use SQLA properly. For now, we'll just fix it not to use
    #      the old Pg interface though
    session = DBConn().session()
    # Process suite
        for suitename in split_args(Options["Suite"]):
            suite = get_suite(suitename, session=session)
                warn("suite '%s' not recognised." % (suitename))
                suite_ids_list.append(suite.suite_id)
            con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
            fubar("No valid suite given.")
    # Process component
    if Options["Component"]:
        component_ids_list = []
        for componentname in split_args(Options["Component"]):
            component = get_component(componentname, session=session)
            if component is None:
                warn("component '%s' not recognised." % (componentname))
                component_ids_list.append(component.component_id)
        if component_ids_list:
            con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
            fubar("No valid component given.")
    # Process architecture
    con_architectures = ""
    if Options["Architecture"]:
        for archname in split_args(Options["Architecture"]):
            # "source" is not a database architecture; it is tracked separately.
            if archname == "source":
                arch = get_architecture(archname, session=session)
                    warn("architecture '%s' not recognised." % (archname))
                    arch_ids_list.append(arch.arch_id)
            con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
                fubar("No valid architecture given.")
    return (con_suites, con_architectures, con_components, check_source)
974 ################################################################################
# Inspired(tm) by Bryn Keller's print_exc_plus (See
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
    # Print the active exception's traceback, then dump each frame's
    # local variables for debugging.
    tb = sys.exc_info()[2]
    traceback.print_exc()
        print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
                                               frame.f_code.co_filename,
        for key, value in frame.f_locals.items():
            print "\t%20s = " % key,
                # Some objects blow up when repr()'d; don't let that kill us.
                print "<unable to print>"
1001 ################################################################################
1003 def try_with_debug(function):
1011 ################################################################################
def arch_compare_sw (a, b):
    """
    Function for use in sorting lists of architectures.

    Sorts normally except that 'source' dominates all others.
    """
    if a == "source" and b == "source":
1029 ################################################################################
def split_args (s, dwim=1):
    """
    Split command line arguments which can be separated by either commas
    or whitespace. If dwim is set, it will complain about string ending
    in comma since this usually means someone did 'dak ls -a i386, m68k
    foo' or something and the inevitable confusion resulting from 'm68k'
    being treated as an argument is undesirable.
    """
    if s.find(",") == -1:
        # Comma-separated form: a trailing comma is almost always a typo.
        if s[-1:] == "," and dwim:
            fubar("split_args: found trailing comma, spurious space maybe?")
1047 ################################################################################
def Dict(**kwargs):
    """Return the keyword arguments as a plain dictionary.

    Convenience helper so callers can write Dict(size=x, section=y).
    The catch-all parameter is named ``kwargs`` rather than ``dict`` so it
    no longer shadows the builtin; keyword callers are unaffected.
    """
    return kwargs
1051 ########################################
def gpgv_get_status_output(cmd, status_read, status_write):
    """
    Our very own version of commands.getstatusoutput(), hacked to support
    gpgv's status fd.
    """
    cmd = ['/bin/sh', '-c', cmd]
    p2cread, p2cwrite = os.pipe()
    c2pread, c2pwrite = os.pipe()
    errout, errin = os.pipe()
        # Child: close every inherited fd except the status pipe, then exec.
        for i in range(3, 256):
            if i != status_write:
            os.execvp(cmd[0], cmd)
    # Parent
    os.dup2(c2pread, c2pwrite)
    os.dup2(errout, errin)
    output = status = ""
        # Multiplex the child's stdout/stderr and the status fd.
        i, o, e = select.select([c2pwrite, errin, status_read], [], [])
            r = os.read(fd, 8196)
                more_data.append(fd)
                if fd == c2pwrite or fd == errin:
                elif fd == status_read:
                    fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
            # No readable data left: reap the child and close the pipes.
            pid, exit_status = os.waitpid(pid, 0)
                os.close(status_write)
                os.close(status_read)
    return output, status, exit_status
1118 ################################################################################
def process_gpgv_output(status):
    # Process the status-fd output
    # Parse "gpgv --status-fd" output into {keyword: args}; malformed lines
    # are accumulated into internal_error.
    for line in status.split('\n'):
        split = line.split()
            internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
        # NODATA/SIGEXPIRED/KEYEXPIRED may legitimately repeat; any other
        # duplicated keyword is treated as an internal error.
        if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
            keywords[keyword] = args
    return (keywords, internal_error)
1145 ################################################################################
def retrieve_key (filename, keyserver=None, keyring=None):
    """
    Retrieve the key that signed 'filename' from 'keyserver' and
    add it to 'keyring'. Returns nothing on success, or an error message
    on failure.
    """
    # Defaults for keyserver and keyring
        keyserver = Cnf["Dinstall::KeyServer"]
        keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(filename):
        return "%s: tainted filename" % (filename)
    # Invoke gpgv on the file
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
    (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)
        return internal_error
    # gpgv against an empty keyring must report NO_PUBKEY, whose first
    # argument is the fingerprint of the missing signing key.
    if not keywords.has_key("NO_PUBKEY"):
        return "didn't find expected NO_PUBKEY in gpgv status-fd output"
    fingerprint = keywords["NO_PUBKEY"][0]
    # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
    # it'll try to create a lockfile in /dev. A better solution might
    # be a tempfile or something.
    cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
          % (Cnf["Dinstall::SigningKeyring"])
    cmd += " --keyring %s --keyserver %s --recv-key %s" \
           % (keyring, keyserver, fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
        return "'%s' failed with exit code %s" % (cmd, result)
1191 ################################################################################
def gpg_keyring_args(keyrings=None):
    # Default to every configured keyring; render them as gpg CLI arguments.
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
    return " ".join(["--keyring %s" % x for x in keyrings])
1199 ################################################################################
def check_signature (sig_filename, reject, data_filename="", keyrings=None, autofetch=None):
    """
    Check the signature of a file and return the fingerprint if the
    signature is valid or 'None' if it's not.  The first argument is the
    filename whose signature should be checked.  The second argument is a
    reject function and is called when an error is found.  The reject()
    function must allow for two arguments: the first is the error message,
    the second is an optional prefix string.  It's possible for reject()
    to be called more than once during an invocation of check_signature().
    The third argument is optional and is the name of the files the
    detached signature applies to.  The fourth argument is optional and is
    a *list* of keyrings to use.  'autofetch' can either be None, True or
    False.  If None, the default behaviour specified in the config will be
    used.

    NOTE(review): several guard lines and early returns appear to be
    missing from this copy of the function (e.g. returns after fatal
    rejects, the bindings of 'key' and 'timestamp', the success-path
    return of the fingerprint) -- confirm against upstream daklib/utils.py
    before relying on the control flow as written.
    """

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(sig_filename):
        reject("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
        # NOTE(review): an early "return None" presumably follows here.

    if data_filename and not re_taint_free.match(data_filename):
        reject("!!WARNING!! tainted data filename: '%s'." % (data_filename))

    # Default to the site-wide keyrings from the dak configuration.
    # NOTE(review): presumably guarded by "if not keyrings:" upstream;
    # as written this clobbers any caller-supplied keyring list.
    keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    # Autofetch the signing key if that's enabled
    if autofetch == None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    # NOTE(review): presumably only executed when autofetch is enabled,
    # and error_msg is presumably checked/rejected afterwards -- confirm.
    error_msg = retrieve_key(sig_filename)

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    # NOTE(review): these three rejects are presumably guarded by
    # "if internal_error:" with a return afterwards -- confirm.
    reject("internal error while performing signature check on %s." % (sig_filename))
    reject(internal_error, "")
    reject("Please report the above errors to the Archive maintainers by replying to this mail.", "")

    # Now check for obviously bad things in the processed output
    if keywords.has_key("KEYREVOKED"):
        reject("The key used to sign %s has been revoked." % (sig_filename))
    if keywords.has_key("BADSIG"):
        reject("bad signature on %s." % (sig_filename))
    # ERRSIG without NO_PUBKEY means the check itself failed, not that the
    # key was merely unavailable.
    if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
        reject("failed to check signature on %s." % (sig_filename))
    if keywords.has_key("NO_PUBKEY"):
        args = keywords["NO_PUBKEY"]
        # NOTE(review): 'key' is presumably bound from args[0] in an elided
        # length check -- it is unbound as this copy stands.
        reject("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if keywords.has_key("BADARMOR"):
        reject("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if keywords.has_key("NODATA"):
        reject("no signature found in %s." % (sig_filename))
    if keywords.has_key("EXPKEYSIG"):
        args = keywords["EXPKEYSIG"]
        # NOTE(review): 'key' presumably bound from args[0] upstream.
        reject("Signature made by expired key 0x%s" % (key))
    # Key expiry only matters when gpgv did not also report a good signature.
    if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
        args = keywords["KEYEXPIRED"]
        # NOTE(review): 'timestamp' is presumably args[0]; the strftime call
        # was presumably inside a try/except ValueError with the "unknown"
        # string as the fallback and the raw timestamp as the else branch.
        # A bare epoch timestamp contains no "T"; ISO-8601 ones do.
        if timestamp.count("T") == 0:
            expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
        expiredate = "unknown (%s)" % (timestamp)
        expiredate = timestamp
        reject("The key used to sign %s has expired on %s" % (sig_filename, expiredate))

    # Next check gpgv exited with a zero return code
    # NOTE(review): presumably guarded by "if exit_status:" -- confirm.
    reject("gpgv failed while checking %s." % (sig_filename))
    reject(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
    reject(prefix_multi_line_string(output, " [GPG output:] "), "")

    # Sanity check the good stuff we expect
    if not keywords.has_key("VALIDSIG"):
        reject("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
    args = keywords["VALIDSIG"]
    # NOTE(review): this reject is presumably guarded by a length check on
    # args before args[0] is taken as the fingerprint.
    reject("internal error while checking signature on %s." % (sig_filename))
    fingerprint = args[0]
    if not keywords.has_key("GOODSIG"):
        reject("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if not keywords.has_key("SIG_ID"):
        reject("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

    # Finally ensure there's not something we don't recognise
    known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
                          NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")

    for keyword in keywords.keys():
        if not known_keywords.has_key(keyword):
            reject("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
1345 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """retrieve email addresses from gpg key uids for a given fingerprint"""
    # Serve repeated lookups for the same fingerprint from the module cache.
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses != None:
        # NOTE(review): the cache-hit "return addresses" and the creation of
        # a fresh set() for the miss path appear to be elided from this
        # copy -- confirm against upstream before relying on it.
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
        % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    # NOTE(review): a check of 'result' presumably guards the parse below.
    # Pull the email address out of every uid line gpg prints.
    for l in output.split('\n'):
        m = re_gpg_uid.match(l)
        # NOTE(review): presumably guarded by "if m:" upstream.
        addresses.add(m.group(1))
    # Remember the result for subsequent calls.
    key_uid_email_cache[fingerprint] = addresses
1364 ################################################################################
1366 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """
    Greedily word-wrap 'paragraph' into lines of at most 'max_length'
    characters, prefixing continuation lines with 'prefix'.

    NOTE(review): the accumulator initialisation ('s', 'line'), the
    "for word in words:" loop header, the else branches and the final
    return appear to be elided from this copy -- the comments below
    describe only the visible fragment.
    """
    words = paragraph.split()
        word_size = len(word)
        # A word longer than the limit gets flushed onto its own line.
        if word_size > max_length:
            s += line + '\n' + prefix
            s += word + '\n' + prefix
        # Would appending this word (plus a separating space) overflow?
        new_length = len(line) + word_size + 1
        if new_length > max_length:
            s += line + '\n' + prefix
1397 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
    Returns the fixed 'src' path.
    """
    # Drop the leading archive root from both endpoints.
    stripped_src = src.replace(root, '', 1)
    link_dir = os.path.dirname(dest.replace(root, '', 1))
    # One "../" hop per directory component between the link and the root.
    hops = len(link_dir.split('/'))
    return ('../' * hops) + stripped_src
1410 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.
    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pair (fd, name).
    """
    # Delegate to tempfile, which creates the file atomically and securely.
    (fd, name) = tempfile.mkstemp(suffix, prefix, directory)
    return (fd, name)
1424 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Return a secure and unique directory by pre-creating it.
    If 'parent' is non-null, it will be the directory the directory is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pathname to the new directory
    """
    # Delegate to tempfile, which creates the directory atomically (0700).
    path = tempfile.mkdtemp(suffix, prefix, parent)
    return path
1438 ################################################################################
def is_email_alias(email):
    """ checks if the user part of the email is listed in the alias file """
    # Lazily populate the module-level alias cache on first use.
    # NOTE(review): the "global alias_cache" declaration, the creation of an
    # empty set, and an "if aliasfn:" guard appear to be elided from this
    # copy -- confirm against upstream before relying on it.
    if alias_cache == None:
        aliasfn = which_alias_file()
        # Each alias-file line is "name: target"; cache only the name part.
        for l in open(aliasfn):
            alias_cache.add(l.split(':')[0])
    # Compare only the local part (before '@') against the cached names.
    uid = email.split('@')[0]
    return uid in alias_cache
1452 ################################################################################
def get_changes_files(dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place)

    Returns a list of filenames
    """
    # Much of the rest of p-u/p-a depends on being in the right place
    # NOTE(review): the os.chdir(dir) call and the try/except OSError
    # wrapper that binds 'e' and routes failures into fubar() appear to be
    # elided from this copy -- 'e' is unbound as written; confirm upstream.
    changes_files = [x for x in os.listdir(dir) if x.endswith('.changes')]
    fubar("Failed to read list from directory %s (%s)" % (dir, e))

    return changes_files
1471 ################################################################################
# Initialise the site-wide dak configuration at import time: create an
# apt configuration object and load the default config file into it.
# NOTE(review): an apt_pkg.init() call presumably precedes this upstream.
Cnf = apt_pkg.newConfiguration()
apt_pkg.ReadConfigFileISC(Cnf,default_config)

# Overlay the host-specific configuration when one other than the
# default is in use, so its values take precedence.
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1481 ###############################################################################