2 # vim:set et ts=4 sw=4:
5 # Copyright (C) 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
7 ################################################################################
9 # This program is free software; you can redistribute it and/or modify
10 # it under the terms of the GNU General Public License as published by
11 # the Free Software Foundation; either version 2 of the License, or
12 # (at your option) any later version.
14 # This program is distributed in the hope that it will be useful,
15 # but WITHOUT ANY WARRANTY; without even the implied warranty of
16 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 # GNU General Public License for more details.
19 # You should have received a copy of the GNU General Public License
20 # along with this program; if not, write to the Free Software
21 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
23 ################################################################################
25 import codecs, commands, email.Header, os, pwd, re, select, socket, shutil, \
26 sys, tempfile, traceback, stat
29 from dak_exceptions import *
31 ################################################################################
# Compiled regular expressions shared across dak for parsing control files,
# package filenames and version strings.
# NOTE(review): this extract interleaves original-file line numbers with the
# code and skips lines (numbering jumps) — statements may be missing between
# the visible ones.
33 re_comments = re.compile(r"\#.*")
34 re_no_epoch = re.compile(r"^\d+\:")
35 re_no_revision = re.compile(r"-[^-]+$")
36 re_arch_from_filename = re.compile(r"/binary-[^/]+/")
37 re_extract_src_version = re.compile (r"(\S+)\s*\((.*)\)")
38 re_isadeb = re.compile (r"(.+?)_(.+?)_(.+)\.u?deb$")
39 re_issource = re.compile (r"(.+)_(.+?)\.(orig\.tar\.gz|diff\.gz|tar\.gz|dsc)$")
41 re_single_line_field = re.compile(r"^(\S*)\s*:\s*(.*)")
42 re_multi_line_field = re.compile(r"^\s(.*)")
43 re_taint_free = re.compile(r"^[-+~/\.\w]+$")
45 re_parse_maintainer = re.compile(r"^\s*(\S.*\S)\s*\<([^\>]+)\>")
46 re_gpg_uid = re.compile('^uid.*<([^>]*)>')
48 re_srchasver = re.compile(r"^(\S+)\s+\((\S+)\)$")
49 re_verwithext = re.compile(r"^(\d+)(?:\.(\d+))(?:\s+\((\S+)\))?$")
# NOTE(review): duplicate binding — re_srchasver is already defined (identically)
# a few lines above; this re-assignment is redundant and a candidate for removal.
51 re_srchasver = re.compile(r"^(\S+)\s+\((\S+)\)$")
# Fallback configuration paths used when no host-specific override applies.
53 default_config = "/etc/dak/dak.conf"
54 default_apt_config = "/etc/dak/apt.conf"
# Cache mapping GPG key fingerprints/uids to email addresses (populated elsewhere).
57 key_uid_email_cache = {}
59 # (hashname, function, earliest_changes_version)
60 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
61 ("sha256", apt_pkg.sha256sum, (1, 8))]
63 ################################################################################
# open_file: thin wrapper around open() that converts any failure into a
# dak-specific CantOpenError carrying the filename.
# NOTE(review): the try/except wrapper lines are missing from this extract
# (original line numbers jump 65 -> 67 -> 69); the raise presumably sits in
# an except clause — confirm against the full source.
65 def open_file(filename, mode='r'):
67 f = open(filename, mode)
69 raise CantOpenError, filename
72 ################################################################################
# our_raw_input: prompt on stdout, read a line of user input; the EOF path
# writes a "User interrupt" notice to stderr (intermediate lines missing here).
74 def our_raw_input(prompt=""):
76 sys.stdout.write(prompt)
82 sys.stderr.write("\nUser interrupt (^D).\n")
85 ################################################################################
# extract_component_from_section: split an archive "section" such as
# "contrib/net" into (section, component); sections without a '/' fall back
# to a default component via the Cnf lookup (assignment lines missing here).
87 def extract_component_from_section(section):
90 if section.find('/') != -1:
91 component = section.split('/')[0]
93 # Expand default component
95 if Cnf.has_key("Component::%s" % section):
100 return (section, component)
102 ################################################################################
# parse_deb822: parse an RFC822-style control blob (.changes/.dsc contents)
# into a dict keyed by lower-cased field name. signing_rules controls how PGP
# signature markers are treated (see parse_changes below for the contract).
# Raises ParseChangesError on malformed input and InvalidDscError when strict
# (signing_rules == 1) checking fails.
# NOTE(review): many statements are absent from this extract (original line
# numbers jump); index increments, the inside_signature bookkeeping and some
# branch bodies are not visible — do not assume the visible lines are contiguous.
104 def parse_deb822(contents, signing_rules=0):
108 # Split the lines in the input, keeping the linebreaks.
109 lines = contents.splitlines(True)
112 raise ParseChangesError, "[Empty changes file]"
114 # Reindex by line number so we can easily verify the format of
120 indexed_lines[index] = line[:-1]
124 num_of_lines = len(indexed_lines.keys())
127 while index < num_of_lines:
129 line = indexed_lines[index]
# Strict mode: an empty line must be followed by the PGP SIGNATURE marker.
131 if signing_rules == 1:
133 if index > num_of_lines:
134 raise InvalidDscError, index
135 line = indexed_lines[index]
136 if not line.startswith("-----BEGIN PGP SIGNATURE"):
137 raise InvalidDscError, index
142 if line.startswith("-----BEGIN PGP SIGNATURE"):
144 if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
# Strict mode: skip the PGP header data up to the first blank line.
146 if signing_rules == 1:
147 while index < num_of_lines and line != "":
149 line = indexed_lines[index]
151 # If we're not inside the signed data, don't process anything
152 if signing_rules >= 0 and not inside_signature:
# Single-line "Field: value" match starts a new field.
154 slf = re_single_line_field.match(line)
156 field = slf.groups()[0].lower()
157 changes[field] = slf.groups()[1]
161 changes[field] += '\n'
# Continuation lines (leading whitespace) append to the current field.
163 mlf = re_multi_line_field.match(line)
166 raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
167 if first == 1 and changes[field] != "":
168 changes[field] += '\n'
170 changes[field] += mlf.groups()[0] + '\n'
# Strict mode: reaching the end while still inside the signature is an error.
174 if signing_rules == 1 and inside_signature:
175 raise InvalidDscError, index
# Preserve the raw input for later re-parsing (see ensure_hashes).
177 changes["filecontents"] = "".join(lines)
179 if changes.has_key("source"):
180 # Strip the source version in brackets from the source field,
181 # put it in the "source-version" field instead.
182 srcver = re_srchasver.search(changes["source"])
184 changes["source"] = srcver.group(1)
185 changes["source-version"] = srcver.group(2)
188 raise ParseChangesError, error
192 ################################################################################
# parse_changes: read a .changes file from disk and delegate to parse_deb822.
194 def parse_changes(filename, signing_rules=0):
195 """Parses a changes file and returns a dictionary where each field is a
196 key. The mandatory first argument is the filename of the .changes
199 signing_rules is an optional argument:
201 o If signing_rules == -1, no signature is required.
202 o If signing_rules == 0 (the default), a signature is required.
203 o If signing_rules == 1, it turns on the same strict format checking
206 The rules for (signing_rules == 1)-mode are:
208 o The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
209 followed by any PGP header data and must end with a blank line.
211 o The data section must end with a blank line and must be followed by
212 "-----BEGIN PGP SIGNATURE-----".
215 changes_in = open_file(filename)
216 content = changes_in.read()
# NOTE(review): the file handle is not visibly closed in this extract —
# a close() may exist in the missing lines; confirm against the full source.
218 return parse_deb822(content, signing_rules)
220 ################################################################################
# hash_key: map a hash name ("md5", "sha1", ...) to the key used in the
# per-file dicts (e.g. "md5sum").
222 def hash_key(hashname):
223 return '%ssum' % hashname
225 ################################################################################
# create_hash: compute and store the named hash for every file in `files`,
# collecting reject messages for files that cannot be opened.
# NOTE(review): the try/continue scaffolding is missing from this extract
# (line numbers jump); the except clause presumably belongs to a try around
# open_file — confirm against the full source.
227 def create_hash(where, files, hashname, hashfunc):
228 """create_hash extends the passed files dict with the given hash by
229 iterating over all files on disk and passing them to the hashing
233 for f in files.keys():
235 file_handle = open_file(f)
236 except CantOpenError:
237 rejmsg.append("Could not open file %s for checksumming" % (f))
239 files[f][hash_key(hashname)] = hashfunc(file_handle)
244 ################################################################################
246 def check_hash(where, files, hashname, hashfunc):
247 """check_hash checks the given hash in the files dict against the actual
248 files on disk. The hash values need to be present consistently in
249 all file entries. It does not modify its input in any way."""
252 for f in files.keys():
256 file_handle = open_file(f)
258 # Check for the hash entry, to not trigger a KeyError.
259 if not files[f].has_key(hash_key(hashname)):
260 rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
264 # Actually check the hash for correctness.
265 if hashfunc(file_handle) != files[f][hash_key(hashname)]:
266 rejmsg.append("%s: %s check failed in %s" % (f, hashname,
268 except CantOpenError:
269 # TODO: This happens when the file is in the pool.
270 # warn("Cannot open file %s" % f)
277 ################################################################################
# check_size: verify each file's on-disk size (via os.stat, whose call is in
# the lines missing from this extract) matches the "size" recorded in `files`;
# returns reject messages for mismatches.
279 def check_size(where, files):
280 """check_size checks the file sizes in the passed files dict against the
284 for f in files.keys():
289 # TODO: This happens when the file is in the pool.
293 actual_size = entry[stat.ST_SIZE]
294 size = int(files[f]["size"])
295 if size != actual_size:
296 rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
297 % (f, actual_size, size, where))
300 ################################################################################
# check_hash_fields: reject any "Checksums-*" field naming a hash that is not
# listed in known_hashes.
302 def check_hash_fields(what, manifest):
303 """check_hash_fields ensures that there are no checksum fields in the
304 given dict that we do not know about."""
307 hashes = map(lambda x: x[0], known_hashes)
308 for field in manifest:
# Field names arrive lower-cased from parse_deb822.
309 if field.startswith("checksums-"):
310 hashname = field.split("-",1)[1]
311 if hashname not in hashes:
312 rejmsg.append("Unsupported checksum field for %s "\
313 "in %s" % (hashname, what))
316 ################################################################################
# _ensure_changes_hash: either import the hash values from the .changes
# (when its Format version is new enough to carry them) or compute them from
# the files on disk; returns reject messages.
# NOTE(review): `func` is assigned in lines missing from this extract —
# presumably parse_checksums vs. create_hash/check_hash depending on the
# format comparison; confirm against the full source.
318 def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
319 if format >= version:
320 # The version should contain the specified hash.
323 # Import hashes from the changes
324 rejmsg = parse_checksums(".changes", files, changes, hashname)
328 # We need to calculate the hash because it can't possibly
331 return func(".changes", files, hashname, hashfunc)
333 # We could add the orig which might be in the pool to the files dict to
334 # access the checksums easily.
336 def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
337 """ensure_dsc_hashes' task is to ensure that each and every *present* hash
338 in the dsc is correct, i.e. identical to the changes file and if necessary
339 the pool. The latter task is delegated to check_hash."""
# Absent Checksums-<hash> field: nothing to verify (early-return lines missing
# from this extract).
342 if not dsc.has_key('Checksums-%s' % (hashname,)):
344 # Import hashes from the dsc
345 parse_checksums(".dsc", dsc_files, dsc, hashname)
347 rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
350 ################################################################################
# ensure_hashes: top-level driver — determine the .changes Format version,
# re-parse the raw changes blob for checksum fields, validate the hash field
# names, then verify/compute every known hash for both the .changes files and
# (for source uploads) the .dsc files. Returns accumulated reject messages.
352 def ensure_hashes(changes, dsc, files, dsc_files):
355 # Make sure we recognise the format of the Files: field in the .changes
356 format = changes.get("format", "0.0").split(".", 1)
358 format = int(format[0]), int(format[1])
360 format = int(float(format[0])), 0
362 # We need to deal with the original changes blob, as the fields we need
363 # might not be in the changes dict serialised into the .dak anymore.
364 orig_changes = parse_deb822(changes['filecontents'])
366 # Copy the checksums over to the current changes dict. This will keep
367 # the existing modifications to it intact.
368 for field in orig_changes:
369 if field.startswith('checksums-'):
370 changes[field] = orig_changes[field]
372 # Check for unsupported hashes
373 rejmsg.extend(check_hash_fields(".changes", changes))
374 rejmsg.extend(check_hash_fields(".dsc", dsc))
376 # We have to calculate the hash if we have an earlier changes version than
377 # the hash appears in rather than require it exist in the changes file
378 for hashname, hashfunc, version in known_hashes:
379 rejmsg.extend(_ensure_changes_hash(changes, format, version, files,
381 if "source" in changes["architecture"]:
382 rejmsg.extend(_ensure_dsc_hash(dsc, dsc_files, hashname,
# parse_checksums: read a "Checksums-<hash>" field (lines of
# "<hash> <size> <filename>") into the per-file dicts, cross-checking sizes
# and flagging files that appear in one place but not the other.
387 def parse_checksums(where, files, manifest, hashname):
389 field = 'checksums-%s' % hashname
390 if not field in manifest:
392 input = manifest[field]
393 for line in input.split('\n'):
396 hash, size, file = line.strip().split(' ')
397 if not files.has_key(file):
398 # TODO: check for the file's entry in the original files dict, not
399 # the one modified by (auto)byhand and other weird stuff
400 # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
401 # (file, hashname, where))
# Sizes are compared as strings here ("size" is stored as a string by
# build_file_list).
403 if not files[file]["size"] == size:
404 rejmsg.append("%s: size differs for files and checksums-%s entry "\
405 "in %s" % (file, hashname, where))
407 files[file][hash_key(hashname)] = hash
408 for f in files.keys():
409 if not files[f].has_key(hash_key(hashname)):
# NOTE(review): this message interpolates `file` — the last loop variable
# from the parsing loop above — instead of `f`, so it names the wrong
# file. Looks like a bug; fix would be s/file/f/ here.
410 rejmsg.append("%s: no entry in checksums-%s in %s" % (file,
414 ################################################################################
416 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
# build_file_list: parse the Files:/Checksums-* field of a .changes or .dsc
# into a dict keyed by filename, each entry holding size/section/priority/
# component plus the per-line hash under `hashname`. Raises NoFilesFieldError,
# UnknownFormatError or ParseChangesError on malformed input.
# NOTE(review): several branch bodies (is_a_dsc handling, line skipping) are
# missing from this extract — the visible lines are not contiguous.
418 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
421 # Make sure we have a Files: field to parse...
422 if not changes.has_key(field):
423 raise NoFilesFieldError
425 # Make sure we recognise the format of the Files: field
426 format = re_verwithext.search(changes.get("format", "0.0"))
428 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
430 format = format.groups()
431 if format[1] == None:
432 format = int(float(format[0])), 0, format[2]
434 format = int(format[0]), int(format[1]), format[2]
435 if format[2] == None:
439 # format = (1,0) are the only formats we currently accept,
440 # format = (0,0) are missing format headers of which we still
441 # have some in the archive.
442 if format != (1,0) and format != (0,0):
443 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
445 if (format < (1,5) or format > (1,8)):
446 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
447 if field != "files" and format < (1,8):
448 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
# Only .changes "Files:" lines carry section/priority columns.
450 includes_section = (not is_a_dsc) and field == "files"
452 # Parse each entry/line:
453 for i in changes[field].split('\n'):
457 section = priority = ""
460 (md5, size, section, priority, name) = s
462 (md5, size, name) = s
464 raise ParseChangesError, i
471 (section, component) = extract_component_from_section(section)
473 files[name] = Dict(size=size, section=section,
474 priority=priority, component=component)
475 files[name][hashname] = md5
479 ################################################################################
# force_to_utf8: try to validate `s` as UTF-8 (attempt lines missing from this
# extract); on failure fall back to decoding as ISO-8859-1 and re-encoding.
481 def force_to_utf8(s):
482 """Forces a string to UTF-8. If the string isn't already UTF-8,
483 it's assumed to be ISO-8859-1."""
488 latin1_s = unicode(s,'iso8859-1')
489 return latin1_s.encode('utf-8')
# rfc2047_encode: return `s` unchanged when pure ASCII; otherwise wrap it in
# an email.Header with the appropriate charset (the try/except cascade lines
# are missing from this extract).
491 def rfc2047_encode(s):
492 """Encodes a (header) string per RFC2047 if necessary. If the
493 string is neither ASCII nor UTF-8, it's assumed to be ISO-8859-1."""
# codecs.lookup(...)[1] is the codec's decoder; used purely for validation.
495 codecs.lookup('ascii')[1](s)
500 codecs.lookup('utf-8')[1](s)
501 h = email.Header.Header(s, 'utf-8', 998)
504 h = email.Header.Header(s, 'iso-8859-1', 998)
507 ################################################################################
509 # <Culus> 'The standard sucks, but my tool is supposed to interoperate
510 # with it. I know - I'll fix the suckage and make things
# fix_maintainer: normalise a "Name <email>" field; raises ParseMaintError on
# unparseable input or a missing '@' (buildd_* addresses excepted).
513 def fix_maintainer (maintainer):
514 """Parses a Maintainer or Changed-By field and returns:
515 (1) an RFC822 compatible version,
516 (2) an RFC2047 compatible version,
520 The name is forced to UTF-8 for both (1) and (3). If the name field
521 contains '.' or ',' (as allowed by Debian policy), (1) and (2) are
522 switched to 'email (name)' format."""
523 maintainer = maintainer.strip()
# Empty input yields four empty strings (falsy-check line missing from this
# extract).
525 return ('', '', '', '')
527 if maintainer.find("<") == -1:
530 elif (maintainer[0] == "<" and maintainer[-1:] == ">"):
531 email = maintainer[1:-1]
534 m = re_parse_maintainer.match(maintainer)
536 raise ParseMaintError, "Doesn't parse as a valid Maintainer field."
540 # Get an RFC2047 compliant version of the name
541 rfc2047_name = rfc2047_encode(name)
543 # Force the name to be UTF-8
544 name = force_to_utf8(name)
# Names containing '.' or ',' would break "Name <email>" form, so swap to
# "email (Name)" per the docstring.
546 if name.find(',') != -1 or name.find('.') != -1:
547 rfc822_maint = "%s (%s)" % (email, name)
548 rfc2047_maint = "%s (%s)" % (email, rfc2047_name)
550 rfc822_maint = "%s <%s>" % (name, email)
551 rfc2047_maint = "%s <%s>" % (rfc2047_name, email)
553 if email.find("@") == -1 and email.find("buildd_") != 0:
554 raise ParseMaintError, "No @ found in email address part."
556 return (rfc822_maint, rfc2047_maint, name, email)
558 ################################################################################
560 # sendmail wrapper, takes _either_ a message string or a file as arguments
# Raises SendmailFailedError (carrying sendmail's output) on non-zero exit.
561 def send_mail (message, filename=""):
562 # If we've been passed a string dump it into a temporary file
# NOTE(review): tempfile.mktemp() is race-prone; the O_EXCL flag below
# mitigates the symlink attack but tempfile.mkstemp would be the modern fix.
564 filename = tempfile.mktemp()
565 fd = os.open(filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, 0700)
566 os.write (fd, message)
# Pipe the file into the configured sendmail command via the shell.
570 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
572 raise SendmailFailedError, output
574 # Clean up any temporary files
578 ################################################################################
# poolify: compute the pool sub-path for a source package — "lib" packages
# hash on their first four characters, everything else on the first one.
580 def poolify (source, component):
583 if source[:3] == "lib":
584 return component + source[:4] + '/' + source + '/'
586 return component + source[:1] + '/' + source + '/'
588 ################################################################################
# move: copy src to dest (creating the destination directory with mode 02775
# if needed) then chmod; refuses to clobber an existing file unless
# `overwrite` is set (the overwrite-check line is missing from this extract).
# Errors are fatal via fubar(). The os.unlink(src) presumably follows in the
# missing tail lines — confirm against the full source.
590 def move (src, dest, overwrite = 0, perms = 0664):
591 if os.path.exists(dest) and os.path.isdir(dest):
594 dest_dir = os.path.dirname(dest)
595 if not os.path.exists(dest_dir):
# Temporarily clear the umask so makedirs gets the exact mode.
596 umask = os.umask(00000)
597 os.makedirs(dest_dir, 02775)
599 #print "Moving %s to %s..." % (src, dest)
600 if os.path.exists(dest) and os.path.isdir(dest):
601 dest += '/' + os.path.basename(src)
602 # Don't overwrite unless forced to
603 if os.path.exists(dest):
605 fubar("Can't move %s to %s - file already exists." % (src, dest))
607 if not os.access(dest, os.W_OK):
608 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
609 shutil.copy2(src, dest)
610 os.chmod(dest, perms)
# copy: same logic as move() but leaves src in place, and raises
# FileExistsError / CantOverwriteError instead of calling fubar().
613 def copy (src, dest, overwrite = 0, perms = 0664):
614 if os.path.exists(dest) and os.path.isdir(dest):
617 dest_dir = os.path.dirname(dest)
618 if not os.path.exists(dest_dir):
619 umask = os.umask(00000)
620 os.makedirs(dest_dir, 02775)
622 #print "Copying %s to %s..." % (src, dest)
623 if os.path.exists(dest) and os.path.isdir(dest):
624 dest += '/' + os.path.basename(src)
625 # Don't overwrite unless forced to
626 if os.path.exists(dest):
628 raise FileExistsError
630 if not os.access(dest, os.W_OK):
631 raise CantOverwriteError
632 shutil.copy2(src, dest)
633 os.chmod(dest, perms)
635 ################################################################################
# Host-specific configuration lookups: each resolves the local hostname and
# checks the Cnf tree for a "Config::<host>::..." override, falling back to a
# module-level default.
# NOTE(review): the first few lines below are the body of a function whose
# `def` line is missing from this extract (likely a which-database-host
# helper) — confirm against the full source.
638 res = socket.gethostbyaddr(socket.gethostname())
639 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
640 if database_hostname:
641 return database_hostname
# which_conf_file: host-specific dak.conf, else default_config.
645 def which_conf_file ():
646 res = socket.gethostbyaddr(socket.gethostname())
647 if Cnf.get("Config::" + res[0] + "::DakConfig"):
648 return Cnf["Config::" + res[0] + "::DakConfig"]
650 return default_config
# which_apt_conf_file: host-specific apt.conf, else default_apt_config.
652 def which_apt_conf_file ():
653 res = socket.gethostbyaddr(socket.gethostname())
654 if Cnf.get("Config::" + res[0] + "::AptConfig"):
655 return Cnf["Config::" + res[0] + "::AptConfig"]
657 return default_apt_config
# which_alias_file: per-host forward-alias file if it exists (the return
# lines are missing from this extract).
659 def which_alias_file():
660 hostname = socket.gethostbyaddr(socket.gethostname())[0]
661 aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
662 if os.path.exists(aliasfn):
667 ################################################################################
669 # Escape characters which have meaning to SQL's regex comparison operator ('~')
670 # (woefully incomplete)
# NOTE(review): the `def` line of this escaping helper is missing from this
# extract; only the replace calls are visible.
673 s = s.replace('+', '\\\\+')
674 s = s.replace('.', '\\\\.')
677 ################################################################################
679 # Perform a substitution of template
# TemplateSubst: read a template file and replace each key of `map` with its
# value (the loop over map's keys is in lines missing from this extract).
680 def TemplateSubst(map, filename):
681 file = open_file(filename)
682 template = file.read()
684 template = template.replace(x,map[x])
688 ################################################################################
# fubar: print a fatal error to stderr and exit; warn (whose def line is
# missing from this extract) prints a non-fatal warning.
690 def fubar(msg, exit_code=1):
691 sys.stderr.write("E: %s\n" % (msg))
695 sys.stderr.write("W: %s\n" % (msg))
697 ################################################################################
699 # Returns the user name with a laughable attempt at rfc822 conformancy
700 # (read: removing stray periods).
# (def line missing from this extract) Reads the GECOS field of the current
# user's passwd entry and strips periods.
702 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
704 ################################################################################
# (def line missing from this extract) Formats a count with a unit suffix —
# presumably a human-readable size helper; confirm against the full source.
714 return ("%d%s" % (c, t))
716 ################################################################################
# cc_fix_changes: convert the space-separated "architecture" string of a
# parsed changes dict into a {arch: 1} membership dict (the split over `o`
# is in lines missing from this extract).
718 def cc_fix_changes (changes):
719 o = changes.get("architecture", "")
721 del changes["architecture"]
722 changes["architecture"] = {}
724 changes["architecture"][j] = 1
726 # Sort by source name, source version, 'have source', and then by filename
# changes_compare: cmp()-style comparator over two .changes *filenames*;
# parses both files (the exception fallbacks for unparseable files are in
# lines missing from this extract) and compares stepwise.
727 def changes_compare (a, b):
729 a_changes = parse_changes(a)
734 b_changes = parse_changes(b)
738 cc_fix_changes (a_changes)
739 cc_fix_changes (b_changes)
741 # Sort by source name
742 a_source = a_changes.get("source")
743 b_source = b_changes.get("source")
744 q = cmp (a_source, b_source)
748 # Sort by source version
749 a_version = a_changes.get("version", "0")
750 b_version = b_changes.get("version", "0")
751 q = apt_pkg.VersionCompare(a_version, b_version)
755 # Sort by 'have source'
756 a_has_source = a_changes["architecture"].get("source")
757 b_has_source = b_changes["architecture"].get("source")
758 if a_has_source and not b_has_source:
760 elif b_has_source and not a_has_source:
763 # Fall back to sort by filename
766 ################################################################################
# find_next_free: probe dest, dest.1, dest.2, ... until a non-existent path
# is found; raises NoFreeFilenameError after `too_many` attempts (the extra
# counter initialisation/increment lines are missing from this extract).
768 def find_next_free (dest, too_many=100):
771 while os.path.exists(dest) and extra < too_many:
772 dest = orig_dest + '.' + repr(extra)
774 if extra >= too_many:
775 raise NoFreeFilenameError
778 ################################################################################
# result_join: join a row of values with `sep`, substituting "" for None
# entries (the None branch body is in lines missing from this extract).
# Note: `list` here shadows the builtin — it is a local accumulator.
780 def result_join (original, sep = '\t'):
782 for i in xrange(len(original)):
783 if original[i] == None:
786 list.append(original[i])
787 return sep.join(list)
789 ################################################################################
# prefix_multi_line_string: prepend `prefix` to every (optionally non-blank)
# line of `str` and return the result without the trailing newline.
791 def prefix_multi_line_string(str, prefix, include_blank_lines=0):
793 for line in str.split('\n'):
795 if line or include_blank_lines:
796 out += "%s%s\n" % (prefix, line)
797 # Strip trailing new line
802 ################################################################################
# validate_changes_file_arg: normalise a .dak argument to its .changes twin
# and verify existence/readability; behaviour on error is selected by
# require_changes as documented in the docstring.
804 def validate_changes_file_arg(filename, require_changes=1):
805 """'filename' is either a .changes or .dak file. If 'filename' is a
806 .dak file, it's changed to be the corresponding .changes file. The
807 function then checks if the .changes file a) exists and b) is
808 readable and returns the .changes filename if so. If there's a
809 problem, the next action depends on the option 'require_changes'
812 o If 'require_changes' == -1, errors are ignored and the .changes
813 filename is returned.
814 o If 'require_changes' == 0, a warning is given and 'None' is returned.
815 o If 'require_changes' == 1, a fatal error is raised.
819 orig_filename = filename
820 if filename.endswith(".dak"):
821 filename = filename[:-4]+".changes"
823 if not filename.endswith(".changes"):
824 error = "invalid file type; not a changes file"
826 if not os.access(filename,os.R_OK):
827 if os.path.exists(filename):
828 error = "permission denied"
830 error = "file not found"
833 if require_changes == 1:
834 fubar("%s: %s." % (orig_filename, error))
835 elif require_changes == 0:
836 warn("Skipping %s - %s" % (orig_filename, error))
838 else: # We only care about the .dak file
843 ################################################################################
# (def line missing from this extract) True for any real build architecture,
# i.e. anything other than "source" and "all".
846 return (arch != "source" and arch != "all")
848 ################################################################################
# join_with_commas_and: English-style list join — "a, b and c"; "nothing"
# for an empty list. Note: parameter `list` shadows the builtin.
850 def join_with_commas_and(list):
851 if len(list) == 0: return "nothing"
852 if len(list) == 1: return list[0]
853 return ", ".join(list[:-1]) + " and " + list[-1]
855 ################################################################################
# (def line missing from this extract) Pretty-print dependency atoms —
# (pkg, version, constraint) tuples — joined with " |" as alternatives.
860 (pkg, version, constraint) = atom
862 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
865 pp_deps.append(pp_dep)
866 return " |".join(pp_deps)
868 ################################################################################
873 ################################################################################
875 # Handle -a, -c and -s arguments; returns them as SQL constraints
# parse_args: translate Options["Suite"/"Component"/"Architecture"] into
# "AND ... IN (...)" SQL fragments via database id lookups; unknown names
# warn, and an option with no valid values is fatal via fubar(). Returns
# (con_suites, con_architectures, con_components, check_source).
# NOTE(review): the Suite branch's `if`/init lines and the id == -1 guard are
# missing from this extract — visible lines are not contiguous.
876 def parse_args(Options):
880 for suite in split_args(Options["Suite"]):
881 suite_id = database.get_suite_id(suite)
883 warn("suite '%s' not recognised." % (suite))
885 suite_ids_list.append(suite_id)
887 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
889 fubar("No valid suite given.")
894 if Options["Component"]:
895 component_ids_list = []
896 for component in split_args(Options["Component"]):
897 component_id = database.get_component_id(component)
898 if component_id == -1:
899 warn("component '%s' not recognised." % (component))
901 component_ids_list.append(component_id)
902 if component_ids_list:
903 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
905 fubar("No valid component given.")
909 # Process architecture
910 con_architectures = ""
911 if Options["Architecture"]:
914 for architecture in split_args(Options["Architecture"]):
# "source" is handled out-of-band (sets check_source in a missing line)
# rather than looked up as a database architecture.
915 if architecture == "source":
918 architecture_id = database.get_architecture_id(architecture)
919 if architecture_id == -1:
920 warn("architecture '%s' not recognised." % (architecture))
922 arch_ids_list.append(architecture_id)
924 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
927 fubar("No valid architecture given.")
931 return (con_suites, con_architectures, con_components, check_source)
933 ################################################################################
935 # Inspired(tm) by Bryn Keller's print_exc_plus (See
936 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
# (def line missing from this extract) Walks the traceback, printing each
# frame's code location and local variables; values that fail to print are
# shown as "<unable to print>".
939 tb = sys.exc_info()[2]
948 traceback.print_exc()
950 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
951 frame.f_code.co_filename,
953 for key, value in frame.f_locals.items():
954 print "\t%20s = " % key,
958 print "<unable to print>"
960 ################################################################################
# try_with_debug: run `function`, dumping the extended traceback above on
# failure (body missing from this extract — confirm against the full source).
962 def try_with_debug(function):
970 ################################################################################
972 # Function for use in sorting lists of architectures.
973 # Sorts normally except that 'source' dominates all others.
975 def arch_compare_sw (a, b):
976 if a == "source" and b == "source":
985 ################################################################################
987 # Split command line arguments which can be separated by either commas
988 # or whitespace. If dwim is set, it will complain about string ending
989 # in comma since this usually means someone did 'dak ls -a i386, m68k
990 # foo' or something and the inevitable confusion resulting from 'm68k'
991 # being treated as an argument is undesirable.
# Returns a list of arguments (the whitespace-split and comma-split return
# lines are missing from this extract).
993 def split_args (s, dwim=1):
994 if s.find(",") == -1:
997 if s[-1:] == "," and dwim:
998 fubar("split_args: found trailing comma, spurious space maybe?")
1001 ################################################################################
# Dict: keyword-argument dict constructor shorthand (parameter shadows the
# builtin `dict`).
1003 def Dict(**dict): return dict
1005 ########################################
1007 # Our very own version of commands.getouputstatus(), hacked to support
# gpgv's --status-fd: runs `cmd` via /bin/sh in a forked child, keeping
# status_write open in the child so gpgv can write machine-readable status
# records to it; returns (output, status, exit_status).
# NOTE(review): the fork(), child/parent branch structure, read-loop
# accumulation and several close() calls are missing from this extract —
# the visible lines are not contiguous.
1009 def gpgv_get_status_output(cmd, status_read, status_write):
1010 cmd = ['/bin/sh', '-c', cmd]
1011 p2cread, p2cwrite = os.pipe()
1012 c2pread, c2pwrite = os.pipe()
1013 errout, errin = os.pipe()
# Child: close every inherited fd except the status pipe before exec.
1023 for i in range(3, 256):
1024 if i != status_write:
1030 os.execvp(cmd[0], cmd)
1036 os.dup2(c2pread, c2pwrite)
1037 os.dup2(errout, errin)
1039 output = status = ""
# Parent: multiplex child stdout/stderr and the status fd until EOF.
1041 i, o, e = select.select([c2pwrite, errin, status_read], [], [])
1044 r = os.read(fd, 8196)
1046 more_data.append(fd)
1047 if fd == c2pwrite or fd == errin:
1049 elif fd == status_read:
1052 fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
1054 pid, exit_status = os.waitpid(pid, 0)
1056 os.close(status_write)
1057 os.close(status_read)
1067 return output, status, exit_status
1069 ################################################################################
# process_gpgv_output: parse gpgv --status-fd text into a {KEYWORD: args}
# dict; malformed lines and unexpected duplicate keywords are accumulated
# into an internal_error string. Returns (keywords, internal_error).
1071 def process_gpgv_output(status):
1072 # Process the status-fd output
1075 for line in status.split('\n'):
1079 split = line.split()
1081 internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
1083 (gnupg, keyword) = split[:2]
# Every genuine status record is prefixed "[GNUPG:]".
1084 if gnupg != "[GNUPG:]":
1085 internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
# These keywords may legitimately repeat, so they are exempt from the
# duplicate check.
1088 if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
1089 internal_error += "found duplicate status token ('%s').\n" % (keyword)
1092 keywords[keyword] = args
1094 return (keywords, internal_error)
1096 ################################################################################
# retrieve_key: probe `filename` with gpgv against an empty keyring to learn
# the missing key's fingerprint (NO_PUBKEY), then fetch that key from the
# keyserver into `keyring`. Returns an error string on failure.
1098 def retrieve_key (filename, keyserver=None, keyring=None):
1099 """Retrieve the key that signed 'filename' from 'keyserver' and
1100 add it to 'keyring'. Returns nothing on success, or an error message
1103 # Defaults for keyserver and keyring
1105 keyserver = Cnf["Dinstall::KeyServer"]
1107 keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
1109 # Ensure the filename contains no shell meta-characters or other badness
# The command below is built by string interpolation and run via a shell,
# so this taint check is the only injection defence.
1110 if not re_taint_free.match(filename):
1111 return "%s: tainted filename" % (filename)
1113 # Invoke gpgv on the file
1114 status_read, status_write = os.pipe();
1115 cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1116 (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1118 # Process the status-fd output
1119 (keywords, internal_error) = process_gpgv_output(status)
1121 return internal_error
1123 if not keywords.has_key("NO_PUBKEY"):
1124 return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1126 fingerprint = keywords["NO_PUBKEY"][0]
1127 # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
1128 # it'll try to create a lockfile in /dev. A better solution might
1129 # be a tempfile or something.
1130 cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1131 % (Cnf["Dinstall::SigningKeyring"])
1132 cmd += " --keyring %s --keyserver %s --recv-key %s" \
1133 % (keyring, keyserver, fingerprint)
1134 (result, output) = commands.getstatusoutput(cmd)
1136 return "'%s' failed with exit code %s" % (cmd, result)
1140 ################################################################################
# gpg_keyring_args: render a keyring list (default: the configured
# Dinstall::GPGKeyring set) as "--keyring X --keyring Y" for a gpg command
# line. The keyrings-is-None guard line is missing from this extract.
1142 def gpg_keyring_args(keyrings=None):
1144 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1146 return " ".join(["--keyring %s" % x for x in keyrings])
1148 ################################################################################
1150 def check_signature (sig_filename, reject, data_filename="", keyrings=None, autofetch=None):
1151 """Check the signature of a file and return the fingerprint if the
1152 signature is valid or 'None' if it's not. The first argument is the
1153 filename whose signature should be checked. The second argument is a
1154 reject function and is called when an error is found. The reject()
1155 function must allow for two arguments: the first is the error message,
1156 the second is an optional prefix string. It's possible for reject()
1157 to be called more than once during an invocation of check_signature().
1158 The third argument is optional and is the name of the files the
1159 detached signature applies to. The fourth argument is optional and is
1160 a *list* of keyrings to use. 'autofetch' can either be None, True or
1161 False. If None, the default behaviour specified in the config will be
# NOTE(review): the rest of the docstring and several implementation lines
# are elided from this excerpt; code below is verbatim, only comments added.
# Both filenames end up on a gpgv command line, so refuse anything with
# shell meta-characters (re_taint_free allows only [-+~/.\w]).
1164 # Ensure the filename contains no shell meta-characters or other badness
1165 if not re_taint_free.match(sig_filename):
1166 reject("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
1169 if data_filename and not re_taint_free.match(data_filename):
1170 reject("!!WARNING!! tainted data filename: '%s'." % (data_filename))
# Default keyring list comes from the Dinstall::GPGKeyring config entry.
1174 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1176 # Autofetch the signing key if that's enabled
# autofetch=None means "use the Dinstall::KeyAutoFetch config default".
1177 if autofetch == None:
1178 autofetch = Cnf.get("Dinstall::KeyAutoFetch")
# retrieve_key() presumably fetches the signing key into a local keyring
# and returns an error string on failure — TODO confirm (body not visible).
1180 error_msg = retrieve_key(sig_filename)
# gpgv reports machine-readable results on a dedicated status fd; create a
# pipe and hand the write end to gpgv via --status-fd.
1185 # Build the command line
1186 status_read, status_write = os.pipe();
1187 cmd = "gpgv --status-fd %s %s %s %s" % (
1188 status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)
1190 # Invoke gpgv on the file
1191 (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)
1193 # Process the status-fd output
# keywords maps each status token (e.g. GOODSIG, VALIDSIG) to its arguments.
1194 (keywords, internal_error) = process_gpgv_output(status)
1196 # If we failed to parse the status-fd output, let's just whine and bail now
1198 reject("internal error while performing signature check on %s." % (sig_filename))
1199 reject(internal_error, "")
1200 reject("Please report the above errors to the Archive maintainers by replying to this mail.", "")
1204 # Now check for obviously bad things in the processed output
1205 if keywords.has_key("KEYREVOKED"):
1206 reject("The key used to sign %s has been revoked." % (sig_filename))
1208 if keywords.has_key("BADSIG"):
1209 reject("bad signature on %s." % (sig_filename))
# ERRSIG alone is a generic failure; with NO_PUBKEY it gets the more
# specific "key not found" message below instead.
1211 if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
1212 reject("failed to check signature on %s." % (sig_filename))
1214 if keywords.has_key("NO_PUBKEY"):
1215 args = keywords["NO_PUBKEY"]
1218 reject("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
1220 if keywords.has_key("BADARMOR"):
1221 reject("ASCII armour of signature was corrupt in %s." % (sig_filename))
1223 if keywords.has_key("NODATA"):
1224 reject("no signature found in %s." % (sig_filename))
# An expired key only matters when gpgv did not also report a good
# signature (i.e. the key was expired at signing time).
1226 if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
1227 args = keywords["KEYEXPIRED"]
1230 reject("The key (0x%s) used to sign %s has expired." % (key, sig_filename))
1236 # Next check gpgv exited with a zero return code
# On failure, include gpgv's raw status-fd and stdout/stderr output in the
# rejection for diagnosis.
1238 reject("gpgv failed while checking %s." % (sig_filename))
1240 reject(prefix_multi_line_string(status, " [GPG status-fd output:] "), "")
1242 reject(prefix_multi_line_string(output, " [GPG output:] "), "")
1245 # Sanity check the good stuff we expect
1246 if not keywords.has_key("VALIDSIG"):
1247 reject("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
# VALIDSIG's first argument is the signing key's fingerprint.
1250 args = keywords["VALIDSIG"]
1252 reject("internal error while checking signature on %s." % (sig_filename))
1255 fingerprint = args[0]
1256 if not keywords.has_key("GOODSIG"):
1257 reject("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
1259 if not keywords.has_key("SIG_ID"):
1260 reject("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))
1263 # Finally ensure there's not something we don't recognise
# Whitelist of gpgv status tokens this function knows how to interpret;
# anything else is reported rather than silently ignored.
1264 known_keywords = Dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
1265 SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
1266 NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="")
1268 for keyword in keywords.keys():
1269 if not known_keywords.has_key(keyword):
1270 reject("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
1278 ################################################################################
1280 def gpg_get_key_addresses(fingerprint):
1281 """Retrieve email addresses from gpg key uids for a given fingerprint."""
# Serve from the module-level cache if this fingerprint was looked up before.
# NOTE(review): the early-return and cache-initialisation lines are elided
# from this excerpt; code below is verbatim, only comments added.
1282 addresses = key_uid_email_cache.get(fingerprint)
1283 if addresses != None:
# Ask gpg to print the key; its "uid" output lines carry the addresses.
1286 cmd = "gpg --no-default-keyring %s --fingerprint %s" \
1287 % (gpg_keyring_args(), fingerprint)
1288 (result, output) = commands.getstatusoutput(cmd)
# re_gpg_uid captures the <...> email part of each uid line.
1290 for l in output.split('\n'):
1291 m = re_gpg_uid.match(l)
1293 addresses.add(m.group(1))
# Cache the result so later lookups skip the gpg invocation.
1294 key_uid_email_cache[fingerprint] = addresses
1297 ################################################################################
1299 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
1301 def wrap(paragraph, max_length, prefix=""):
# Word-wrap 'paragraph' to at most 'max_length' columns, prepending
# 'prefix' to continuation lines.
# NOTE(review): the accumulator initialisation and return lines are elided
# from this excerpt; code below is verbatim, only comments added.
1305 words = paragraph.split()
1308 word_size = len(word)
# A single word longer than the limit is flushed onto a line of its own
# rather than being split mid-word.
1309 if word_size > max_length:
1311 s += line + '\n' + prefix
1312 s += word + '\n' + prefix
# +1 accounts for the separating space before the candidate word.
1315 new_length = len(line) + word_size + 1
# Appending the word would overflow the limit: emit the current line first.
1316 if new_length > max_length:
1317 s += line + '\n' + prefix
1330 ################################################################################
1332 # Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
1333 # Returns fixed 'src'
def clean_symlink (src, dest, root):
    """Relativize the absolute symlink 'src' -> 'dest' with respect to
    'root' and return the fixed-up 'src' path."""
    # Strip the leading root component from both endpoints (first
    # occurrence only).
    stripped_src = src.replace(root, '', 1)
    target_dir = os.path.dirname(dest.replace(root, '', 1))
    # One "../" hop for each directory level the link itself lives under.
    hops = ''.join(['../' for _ in target_dir.split('/')])
    return hops + stripped_src
1341 ################################################################################
1343 def temp_filename(directory=None, dotprefix=None, perms=0700):
1344 """Return a secure and unique filename by pre-creating it.
1345 If 'directory' is non-null, it will be the directory the file is pre-created in.
1346 If 'dotprefix' is non-null, the filename will be prefixed with a '.'."""
# NOTE(review): several lines (including the return statement and the
# dotprefix guard) are elided from this excerpt; code below is verbatim,
# only comments added.
# Temporarily repoint the tempfile module at the requested directory.
1349 old_tempdir = tempfile.tempdir
1350 tempfile.tempdir = directory
1352 filename = tempfile.mktemp()
# Prepend a '.' to the basename (dotprefix case).
1355 filename = "%s/.%s" % (os.path.dirname(filename), os.path.basename(filename))
# Pre-create with O_EXCL so the name chosen by mktemp() cannot be raced.
1356 fd = os.open(filename, os.O_RDWR|os.O_CREAT|os.O_EXCL, perms)
# Restore the module-wide tempdir before returning.
1360 tempfile.tempdir = old_tempdir
1364 ################################################################################
1366 # checks if the user part of the email is listed in the alias file
1368 def is_email_alias(email):
# Return whether the local part of 'email' appears as an alias name in
# the system alias file.
# NOTE(review): the cache-initialisation lines are elided from this
# excerpt; code below is verbatim, only comments added.
# Lazily populate the module-level alias cache on first use.
1370 if alias_cache == None:
1371 aliasfn = which_alias_file()
# Alias file lines look like "name: target"; keep only the name part.
1374 for l in open(aliasfn):
1375 alias_cache.add(l.split(':')[0])
# Only the user part (before '@') is matched against the alias names.
1376 uid = email.split('@')[0]
1377 return uid in alias_cache
1379 ################################################################################
# Module initialisation: load the default dak configuration into the
# global apt_pkg configuration object, then overlay the host-specific
# config file when it differs from the default.
1383 Cnf = apt_pkg.newConfiguration()
1384 apt_pkg.ReadConfigFileISC(Cnf,default_config)
1386 if which_conf_file() != default_config:
1387 apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1389 ################################################################################