2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
41 import email as modemail
44 from dbconn import DBConn, get_architecture, get_component, get_suite
45 from dak_exceptions import *
46 from textutils import fix_maintainer
47 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
48 re_multi_line_field, re_srchasver, re_verwithext, \
49 re_parse_maintainer, re_taint_free, re_gpg_uid, \
50 re_re_mark, re_whitespace_comment, re_issource
52 from srcformats import srcformats
53 from collections import defaultdict
55 ################################################################################
# Module-level defaults and caches used throughout this file.
57 default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
58 default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used
60 alias_cache = None #: Cache for email alias checks
61 key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
63 # (hashname, function, earliest_changes_version)
64 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
65 ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
# subprocess-based replacement for commands.getstatusoutput(); monkey-patched
# over the commands module below so all callers pick it up.
# NOTE(review): this chunk elides lines between the Popen call and the
# monkey-patch (exit-status handling / return are not visible here).
68 def dak_getstatusoutput(cmd):
69 pipe = subprocess.Popen(cmd, shell=True, universal_newlines=True,
70 stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
72 output = "".join(pipe.stdout.readlines())
79 commands.getstatusoutput = dak_getstatusoutput
81 ################################################################################
84 """ Escape html chars """
85 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
87 ################################################################################
# Open a file, translating IOError into the project-specific CantOpenError.
# NOTE(review): the try/except wrapper and the "return f" are elided from this
# chunk; only the open() and the re-raise are visible.
89 def open_file(filename, mode='r'):
91 Open C{file}, return fileobject.
93 @type filename: string
94 @param filename: path/filename to open
97 @param mode: open mode
100 @return: open fileobject
102 @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
106 f = open(filename, mode)
108 raise CantOpenError, filename
111 ################################################################################
# Prompt on stdout and read a line from stdin; reports ^D (EOF) on stderr.
# NOTE(review): the actual read and the return/exit path are elided here.
113 def our_raw_input(prompt=""):
115 sys.stdout.write(prompt)
121 sys.stderr.write("\nUser interrupt (^D).\n")
124 ################################################################################
# Split a Debian "Section" value such as "contrib/net" into (section, component).
# NOTE(review): the elided lines include the branch taken when there is no '/'
# and the body of the Cnf-based default-component expansion.
126 def extract_component_from_section(section):
129 if section.find('/') != -1:
130 component = section.split('/')[0]
132 # Expand default component
134 if Cnf.has_key("Component::%s" % section):
139 return (section, component)
141 ################################################################################
# Parse RFC-822-style control data (.changes/.dsc contents) into a dict of
# fields; also stores the raw text under "filecontents".
# signing_rules: -1 = no signature needed, 0 = signature required,
# 1 = strict inline-signature layout checking (see parse_changes below).
# NOTE(review): many lines are elided from this chunk — the visible statements
# are NOT contiguous; do not reason about control flow from this listing alone.
143 def parse_deb822(contents, signing_rules=0):
147 # Split the lines in the input, keeping the linebreaks.
148 lines = contents.splitlines(True)
151 raise ParseChangesError, "[Empty changes file]"
153 # Reindex by line number so we can easily verify the format of
159 indexed_lines[index] = line[:-1]
163 num_of_lines = len(indexed_lines.keys())
166 while index < num_of_lines:
168 line = indexed_lines[index]
170 if signing_rules == 1:
172 if index > num_of_lines:
173 raise InvalidDscError, index
174 line = indexed_lines[index]
175 if not line.startswith("-----BEGIN PGP SIGNATURE"):
176 raise InvalidDscError, index
181 if line.startswith("-----BEGIN PGP SIGNATURE"):
183 if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
185 if signing_rules == 1:
186 while index < num_of_lines and line != "":
188 line = indexed_lines[index]
190 # If we're not inside the signed data, don't process anything
191 if signing_rules >= 0 and not inside_signature:
193 slf = re_single_line_field.match(line)
195 field = slf.groups()[0].lower()
196 changes[field] = slf.groups()[1]
200 changes[field] += '\n'
202 mlf = re_multi_line_field.match(line)
205 raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
206 if first == 1 and changes[field] != "":
207 changes[field] += '\n'
209 changes[field] += mlf.groups()[0] + '\n'
213 if signing_rules == 1 and inside_signature:
214 raise InvalidDscError, index
216 changes["filecontents"] = "".join(lines)
218 if changes.has_key("source"):
219 # Strip the source version in brackets from the source field,
220 # put it in the "source-version" field instead.
221 srcver = re_srchasver.search(changes["source"])
223 changes["source"] = srcver.group(1)
224 changes["source-version"] = srcver.group(2)
227 raise ParseChangesError, error
231 ################################################################################
# Read a .changes file, validate it decodes as UTF-8, then delegate parsing
# to parse_deb822().  The unicode() call is a Python-2 validity check only;
# its result is discarded.  NOTE(review): the try/except around the decode
# and the file-handle close are elided from this chunk.
233 def parse_changes(filename, signing_rules=0):
235 Parses a changes file and returns a dictionary where each field is a
236 key. The mandatory first argument is the filename of the .changes
239 signing_rules is an optional argument:
241 - If signing_rules == -1, no signature is required.
242 - If signing_rules == 0 (the default), a signature is required.
243 - If signing_rules == 1, it turns on the same strict format checking
246 The rules for (signing_rules == 1)-mode are:
248 - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
249 followed by any PGP header data and must end with a blank line.
251 - The data section must end with a blank line and must be followed by
252 "-----BEGIN PGP SIGNATURE-----".
255 changes_in = open_file(filename)
256 content = changes_in.read()
259 unicode(content, 'utf-8')
261 raise ChangesUnicodeError, "Changes file not proper utf-8"
262 return parse_deb822(content, signing_rules)
264 ################################################################################
def hash_key(hashname):
    """Return the files-dict key under which the given hash is stored.

    E.g. "sha1" -> "sha1sum", "sha256" -> "sha256sum".
    """
    return "{0}sum".format(hashname)
269 ################################################################################
# Compute the given hash for every file on disk and store it into the files
# dict under hash_key(hashname); unopenable files are collected as reject
# messages.  NOTE(review): the rejmsg initialisation, the try: line and the
# return are elided from this chunk.
271 def create_hash(where, files, hashname, hashfunc):
273 create_hash extends the passed files dict with the given hash by
274 iterating over all files on disk and passing them to the hashing
279 for f in files.keys():
281 file_handle = open_file(f)
282 except CantOpenError:
283 rejmsg.append("Could not open file %s for checksumming" % (f))
286 files[f][hash_key(hashname)] = hashfunc(file_handle)
291 ################################################################################
# Verify the stored hash of each entry in the files dict against the file on
# disk; returns a list of reject messages (elided here).  A missing file is
# deliberately tolerated (pool case, see the TODO below).
# NOTE(review): the try:/finally and return statements are elided from this chunk.
293 def check_hash(where, files, hashname, hashfunc):
295 check_hash checks the given hash in the files dict against the actual
296 files on disk. The hash values need to be present consistently in
297 all file entries. It does not modify its input in any way.
301 for f in files.keys():
305 file_handle = open_file(f)
307 # Check for the hash entry, to not trigger a KeyError.
308 if not files[f].has_key(hash_key(hashname)):
309 rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
313 # Actually check the hash for correctness.
314 if hashfunc(file_handle) != files[f][hash_key(hashname)]:
315 rejmsg.append("%s: %s check failed in %s" % (f, hashname,
317 except CantOpenError:
318 # TODO: This happens when the file is in the pool.
319 # warn("Cannot open file %s" % f)
326 ################################################################################
# Compare each entry's recorded "size" against os.stat() of the file on disk,
# collecting mismatches as reject messages.
# NOTE(review): the stat call, its OSError handling and the return are elided.
328 def check_size(where, files):
330 check_size checks the file sizes in the passed files dict against the
335 for f in files.keys():
340 # TODO: This happens when the file is in the pool.
344 actual_size = entry[stat.ST_SIZE]
345 size = int(files[f]["size"])
346 if size != actual_size:
347 rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
348 % (f, actual_size, size, where))
351 ################################################################################
# Validate that the file list of a .dsc matches what its Format field allows
# (e.g. 1.0 native vs quilt layouts), returning accumulated reject messages.
# NOTE(review): this chunk elides several lines (rejmsg init, loop bodies,
# the return); the visible statements are not contiguous.
353 def check_dsc_files(dsc_filename, dsc=None, dsc_files=None):
355 Verify that the files listed in the Files field of the .dsc are
356 those expected given the announced Format.
358 @type dsc_filename: string
359 @param dsc_filename: path of .dsc file
362 @param dsc: the content of the .dsc parsed by C{parse_changes()}
364 @type dsc_files: dict
365 @param dsc_files: the file list returned by C{build_file_list()}
368 @return: all errors detected
372 # Parse the file if needed
374 dsc = parse_changes(dsc_filename, signing_rules=1);
376 if dsc_files is None:
377 dsc_files = build_file_list(dsc, is_a_dsc=1)
379 # Ensure .dsc lists proper set of source files according to the format
381 has = defaultdict(lambda: 0)
# NOTE(review): the first three patterns below leave '.' unescaped, so they
# match any character in those positions — confirm intent against upstream.
384 (r'orig.tar.gz', ('orig_tar_gz', 'orig_tar')),
385 (r'diff.gz', ('debian_diff',)),
386 (r'tar.gz', ('native_tar_gz', 'native_tar')),
387 (r'debian\.tar\.(gz|bz2)', ('debian_tar',)),
388 (r'orig\.tar\.(gz|bz2)', ('orig_tar',)),
389 (r'tar\.(gz|bz2)', ('native_tar',)),
390 (r'orig-.+\.tar\.(gz|bz2)', ('more_orig_tar',)),
393 for f in dsc_files.keys():
394 m = re_issource.match(f)
396 rejmsg.append("%s: %s in Files field not recognised as source."
400 # Populate 'has' dictionary by resolving keys in lookup table
402 for regex, keys in ftype_lookup:
403 if re.match(regex, m.group(3)):
409 # File does not match anything in lookup table; reject
# NOTE(review): "reject(...)" is inconsistent with rejmsg.append() used
# elsewhere in this function, and no "reject" is defined in this chunk —
# verify against upstream.
411 reject("%s: unexpected source file '%s'" % (dsc_filename, f))
413 # Check for multiple files
414 for file_type in ('orig_tar', 'native_tar', 'debian_tar', 'debian_diff'):
415 if has[file_type] > 1:
416 rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))
418 # Source format specific tests
419 for format in srcformats:
420 if format.re_format.match(dsc['format']):
422 '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)
428 ################################################################################
# Reject any "Checksums-*" field in the manifest whose hash name is not in
# our known_hashes table.  NOTE(review): rejmsg init and return are elided.
430 def check_hash_fields(what, manifest):
432 check_hash_fields ensures that there are no checksum fields in the
433 given dict that we do not know about.
437 hashes = map(lambda x: x[0], known_hashes)
438 for field in manifest:
439 if field.startswith("checksums-"):
440 hashname = field.split("-",1)[1]
441 if hashname not in hashes:
442 rejmsg.append("Unsupported checksum field for %s "\
443 "in %s" % (hashname, what))
446 ################################################################################
# For .changes at/above the version that introduced this hash, import the
# declared checksums; otherwise compute them ourselves.
# NOTE(review): the else branch and the selection of "func" are elided here.
448 def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
449 if format >= version:
450 # The version should contain the specified hash.
453 # Import hashes from the changes
454 rejmsg = parse_checksums(".changes", files, changes, hashname)
458 # We need to calculate the hash because it can't possibly
461 return func(".changes", files, hashname, hashfunc)
463 # We could add the orig which might be in the pool to the files dict to
464 # access the checksums easily.
# Verify every Checksums-<hashname> entry present in the .dsc; absent fields
# are simply skipped (early return elided).  Actual verification is delegated
# to check_hash().  NOTE(review): rejmsg init and final return are elided.
466 def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
468 ensure_dsc_hashes' task is to ensure that each and every *present* hash
469 in the dsc is correct, i.e. identical to the changes file and if necessary
470 the pool. The latter task is delegated to check_hash.
474 if not dsc.has_key('Checksums-%s' % (hashname,)):
476 # Import hashes from the dsc
477 parse_checksums(".dsc", dsc_files, dsc, hashname)
479 rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
482 ################################################################################
# Parse a "Checksums-<hashname>" field (lines of "checksum size filename")
# into the files dict, cross-checking sizes and flagging files that have no
# checksum entry.  NOTE(review): several lines (rejmsg init, try:, continue
# statements, return) are elided from this chunk.
484 def parse_checksums(where, files, manifest, hashname):
486 field = 'checksums-%s' % hashname
487 if not field in manifest:
489 for line in manifest[field].split('\n'):
492 clist = line.strip().split(' ')
494 checksum, size, checkfile = clist
496 rejmsg.append("Cannot parse checksum line [%s]" % (line))
498 if not files.has_key(checkfile):
499 # TODO: check for the file's entry in the original files dict, not
500 # the one modified by (auto)byhand and other weird stuff
501 # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
502 # (file, hashname, where))
# NOTE(review): both sides here are strings from parsing, so the comparison is
# a string compare — fine only if both were recorded identically; confirm.
504 if not files[checkfile]["size"] == size:
505 rejmsg.append("%s: size differs for files and checksums-%s entry "\
506 "in %s" % (checkfile, hashname, where))
508 files[checkfile][hash_key(hashname)] = checksum
509 for f in files.keys():
510 if not files[f].has_key(hash_key(hashname)):
# NOTE(review): this message interpolates "checkfile", which is stale from the
# earlier loop — it almost certainly should be "f"; confirm against upstream
# before changing.
511 rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
515 ################################################################################
517 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
# Parse the Files: (or Checksums-*) field of a parsed .changes/.dsc into a
# dict keyed by filename, with size/section/priority/component and the hash.
# NOTE(review): this chunk elides several lines (format-tuple fixups, the
# 5-vs-3-token branch conditions, the return); statements are not contiguous.
519 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
522 # Make sure we have a Files: field to parse...
523 if not changes.has_key(field):
524 raise NoFilesFieldError
526 # Make sure we recognise the format of the Files: field
527 format = re_verwithext.search(changes.get("format", "0.0"))
529 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
531 format = format.groups()
532 if format[1] == None:
533 format = int(float(format[0])), 0, format[2]
535 format = int(format[0]), int(format[1]), format[2]
536 if format[2] == None:
540 # format = (0,0) are missing format headers of which we still
541 # have some in the archive.
542 if format != (1,0) and format != (0,0) and \
543 format != (3,0,"quilt") and format != (3,0,"native"):
544 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
546 if (format < (1,5) or format > (1,8)):
547 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
548 if field != "files" and format < (1,8):
549 raise UnknownFormatError, "%s" % (changes.get("format","0.0"))
551 includes_section = (not is_a_dsc) and field == "files"
553 # Parse each entry/line:
554 for i in changes[field].split('\n'):
558 section = priority = ""
561 (md5, size, section, priority, name) = s
563 (md5, size, name) = s
565 raise ParseChangesError, i
572 (section, component) = extract_component_from_section(section)
574 files[name] = Dict(size=size, section=section,
575 priority=priority, component=component)
576 files[name][hashname] = md5
580 ################################################################################
# Send a mail via the configured sendmail command; optionally filters the
# recipient headers (To/Bcc/Cc) against Dinstall::MailWhiteList and drops the
# mail entirely if no recipient remains.
# NOTE(review): this chunk elides many lines (loop headers, whitelist init,
# the match list, os.close calls, final unlink); not contiguous.
582 def send_mail (message, filename=""):
583 """sendmail wrapper, takes _either_ a message string or a file as arguments"""
585 # If we've been passed a string dump it into a temporary file
587 (fd, filename) = tempfile.mkstemp()
588 os.write (fd, message)
591 if Cnf.has_key("Dinstall::MailWhiteList") and \
592 Cnf["Dinstall::MailWhiteList"] != "":
593 message_in = open_file(filename)
594 message_raw = modemail.message_from_file(message_in)
598 whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
600 for line in whitelist_in:
601 if not re_whitespace_comment.match(line):
# Lines starting with the "RE:" marker are treated as regexes; anything else
# is matched literally (escaped).
602 if re_re_mark.match(line):
603 whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
605 whitelist.append(re.compile(re.escape(line.strip())))
610 fields = ["To", "Bcc", "Cc"]
613 value = message_raw.get(field, None)
616 for item in value.split(","):
617 (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
623 if not mail_whitelisted:
624 print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])
628 # Doesn't have any mail in whitelist so remove the header
630 del message_raw[field]
632 message_raw.replace_header(field, string.join(match, ", "))
634 # Change message fields in order if we don't have a To header
635 if not message_raw.has_key("To"):
638 if message_raw.has_key(field):
639 message_raw[fields[-1]] = message_raw[field]
640 del message_raw[field]
643 # Clean up any temporary files
644 # and return, as we removed all recipients.
646 os.unlink (filename);
649 fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
650 os.write (fd, message_raw.as_string(True));
654 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
656 raise SendmailFailedError, output
658 # Clean up any temporary files
662 ################################################################################
# Build the pool sub-path for a source package: "lib*" packages key on their
# first four characters, everything else on the first character.
# NOTE(review): elided lines may normalise "component" (e.g. append '/')
# before these concatenations — confirm against upstream before relying on
# the exact returned prefix.
664 def poolify (source, component):
667 if source[:3] == "lib":
668 return component + source[:4] + '/' + source + '/'
670 return component + source[:1] + '/' + source + '/'
672 ################################################################################
# Move a file to dest (dest may be a directory), creating the destination
# directory group-writable if needed and refusing to overwrite unless
# "overwrite" is set.  NOTE(review): elided lines include the umask restore
# and the removal of src after the copy — confirm against upstream.
674 def move (src, dest, overwrite = 0, perms = 0664):
675 if os.path.exists(dest) and os.path.isdir(dest):
678 dest_dir = os.path.dirname(dest)
679 if not os.path.exists(dest_dir):
680 umask = os.umask(00000)
681 os.makedirs(dest_dir, 02775)
683 #print "Moving %s to %s..." % (src, dest)
684 if os.path.exists(dest) and os.path.isdir(dest):
685 dest += '/' + os.path.basename(src)
686 # Don't overwrite unless forced to
687 if os.path.exists(dest):
689 fubar("Can't move %s to %s - file already exists." % (src, dest))
691 if not os.access(dest, os.W_OK):
692 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
693 shutil.copy2(src, dest)
694 os.chmod(dest, perms)
# Copy a file to dest; same directory-creation and overwrite rules as move()
# above, but raises exceptions (FileExistsError/CantOverwriteError) instead of
# calling fubar().  NOTE(review): elided lines include the umask restore.
697 def copy (src, dest, overwrite = 0, perms = 0664):
698 if os.path.exists(dest) and os.path.isdir(dest):
701 dest_dir = os.path.dirname(dest)
702 if not os.path.exists(dest_dir):
703 umask = os.umask(00000)
704 os.makedirs(dest_dir, 02775)
706 #print "Copying %s to %s..." % (src, dest)
707 if os.path.exists(dest) and os.path.isdir(dest):
708 dest += '/' + os.path.basename(src)
709 # Don't overwrite unless forced to
710 if os.path.exists(dest):
712 raise FileExistsError
714 if not os.access(dest, os.W_OK):
715 raise CantOverwriteError
716 shutil.copy2(src, dest)
717 os.chmod(dest, perms)
719 ################################################################################
# Body of a host-identity helper: resolves our canonical hostname and prefers
# the per-host configured DatabaseHostname if one is set.
# NOTE(review): the enclosing "def" line and the fallback return (presumably
# returning res[0]) are elided from this chunk.
722 res = socket.gethostbyaddr(socket.gethostname())
723 database_hostname = Cnf.get("Config::" + res[0] + "::DatabaseHostname")
724 if database_hostname:
725 return database_hostname
# Decide which dak config file to use: a per-host configured path if set,
# otherwise the module default.
729 def which_conf_file ():
730 res = socket.gethostbyaddr(socket.gethostname())
731 # In case we allow local config files per user, try if one exists
732 if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
733 homedir = os.getenv("HOME")
# NOTE(review): os.path.join() discards "homedir" because the second argument
# is absolute — confpath is always "/etc/dak.conf", not a per-user path.
734 confpath = os.path.join(homedir, "/etc/dak.conf")
735 if os.path.exists(confpath):
# NOTE(review): this reads default_config, not confpath — looks wrong; the
# elided line(s) here may also return confpath. Confirm against upstream.
736 apt_pkg.ReadConfigFileISC(Cnf,default_config)
738 # We are still in here, so there is no local config file or we do
739 # not allow local files. Do the normal stuff.
740 if Cnf.get("Config::" + res[0] + "::DakConfig"):
741 return Cnf["Config::" + res[0] + "::DakConfig"]
743 return default_config
# Same logic as which_conf_file() but for the apt configuration file.
745 def which_apt_conf_file ():
746 res = socket.gethostbyaddr(socket.gethostname())
747 # In case we allow local config files per user, try if one exists
748 if Cnf.FindB("Config::" + res[0] + "::AllowLocalConfig"):
749 homedir = os.getenv("HOME")
# NOTE(review): same os.path.join() absolute-second-argument issue as in
# which_conf_file() above — homedir is discarded.
750 confpath = os.path.join(homedir, "/etc/dak.conf")
751 if os.path.exists(confpath):
752 apt_pkg.ReadConfigFileISC(Cnf,default_config)
754 if Cnf.get("Config::" + res[0] + "::AptConfig"):
755 return Cnf["Config::" + res[0] + "::AptConfig"]
757 return default_apt_config
# Return the per-host forward-alias file path if it exists.
# NOTE(review): the return statements (alias path / None fallback) are elided.
759 def which_alias_file():
760 hostname = socket.gethostbyaddr(socket.gethostname())[0]
761 aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
762 if os.path.exists(aliasfn):
767 ################################################################################
# Read a template file and substitute each key of "map" with str(value).
# NOTE(review): the loop header over map's keys, the file close and the
# return are elided from this chunk.  The "map" parameter shadows the builtin.
769 def TemplateSubst(map, filename):
770 """ Perform a substitution of template """
771 templatefile = open_file(filename)
772 template = templatefile.read()
774 template = template.replace(x, str(map[x]))
778 ################################################################################
# fubar(): print a fatal error to stderr (the sys.exit(exit_code) line is
# elided here).  The final write below belongs to warn(), whose "def" line is
# also elided from this chunk.
780 def fubar(msg, exit_code=1):
781 sys.stderr.write("E: %s\n" % (msg))
785 sys.stderr.write("W: %s\n" % (msg))
787 ################################################################################
# Fragments of three helpers whose "def" lines are elided from this chunk:
# 1) GECOS-based user name with stray periods removed, 2) login-name lookup,
# 3) the tail of a human-readable size formatter returning e.g. "3MB".
789 # Returns the user name with a laughable attempt at rfc822 conformancy
790 # (read: removing stray periods).
792 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
795 return pwd.getpwuid(os.getuid())[0]
797 ################################################################################
807 return ("%d%s" % (c, t))
809 ################################################################################
# Normalise the "architecture" field in-place: replace the whitespace-separated
# string with a dict mapping each architecture name to 1.
# NOTE(review): the loop header iterating over o.split() is elided here.
811 def cc_fix_changes (changes):
812 o = changes.get("architecture", "")
814 del changes["architecture"]
815 changes["architecture"] = {}
817 changes["architecture"][j] = 1
# cmp()-style comparator for .changes filenames: parses both files and orders
# by source name, then version, then presence of source, then filename.
# NOTE(review): the except fallbacks around parse_changes, the early "return q"
# lines and the final filename compare are elided from this chunk.
819 def changes_compare (a, b):
820 """ Sort by source name, source version, 'have source', and then by filename """
822 a_changes = parse_changes(a)
827 b_changes = parse_changes(b)
831 cc_fix_changes (a_changes)
832 cc_fix_changes (b_changes)
834 # Sort by source name
835 a_source = a_changes.get("source")
836 b_source = b_changes.get("source")
837 q = cmp (a_source, b_source)
841 # Sort by source version
842 a_version = a_changes.get("version", "0")
843 b_version = b_changes.get("version", "0")
844 q = apt_pkg.VersionCompare(a_version, b_version)
848 # Sort by 'have source'
849 a_has_source = a_changes["architecture"].get("source")
850 b_has_source = b_changes["architecture"].get("source")
851 if a_has_source and not b_has_source:
853 elif b_has_source and not a_has_source:
856 # Fall back to sort by filename
859 ################################################################################
# Find a non-existing filename by appending ".1", ".2", ... up to too_many.
# NOTE(review): the initialisation of "extra"/"orig_dest", the increment and
# the final "return dest" are elided from this chunk.
861 def find_next_free (dest, too_many=100):
864 while os.path.exists(dest) and extra < too_many:
865 dest = orig_dest + '.' + repr(extra)
867 if extra >= too_many:
868 raise NoFreeFilenameError
871 ################################################################################
# Join a sequence with "sep", substituting "" for None entries.
# NOTE(review): the "resultlist = []" initialisation and the else: line are
# elided from this chunk.
873 def result_join (original, sep = '\t'):
875 for i in xrange(len(original)):
876 if original[i] == None:
877 resultlist.append("")
879 resultlist.append(original[i])
880 return sep.join(resultlist)
882 ################################################################################
# Prefix every line of a multi-line string; blank lines are skipped unless
# include_blank_lines is set.  The "str" parameter shadows the builtin.
# NOTE(review): the "out" initialisation, the optional line-strip and the
# return (dropping the trailing newline) are elided from this chunk.
884 def prefix_multi_line_string(str, prefix, include_blank_lines=0):
886 for line in str.split('\n'):
888 if line or include_blank_lines:
889 out += "%s%s\n" % (prefix, line)
890 # Strip trailing new line
895 ################################################################################
# Map a .dak argument to its .changes sibling and validate existence and
# readability, with behaviour on failure selected by require_changes.
# NOTE(review): the success return and the require_changes == -1 path are
# elided from this chunk.
897 def validate_changes_file_arg(filename, require_changes=1):
899 'filename' is either a .changes or .dak file. If 'filename' is a
900 .dak file, it's changed to be the corresponding .changes file. The
901 function then checks if the .changes file a) exists and b) is
902 readable and returns the .changes filename if so. If there's a
903 problem, the next action depends on the option 'require_changes'
906 - If 'require_changes' == -1, errors are ignored and the .changes
907 filename is returned.
908 - If 'require_changes' == 0, a warning is given and 'None' is returned.
909 - If 'require_changes' == 1, a fatal error is raised.
914 orig_filename = filename
915 if filename.endswith(".dak"):
916 filename = filename[:-4]+".changes"
918 if not filename.endswith(".changes"):
919 error = "invalid file type; not a changes file"
921 if not os.access(filename,os.R_OK):
922 if os.path.exists(filename):
923 error = "permission denied"
925 error = "file not found"
928 if require_changes == 1:
929 fubar("%s: %s." % (orig_filename, error))
930 elif require_changes == 0:
931 warn("Skipping %s - %s" % (orig_filename, error))
933 else: # We only care about the .dak file
938 ################################################################################
# Body of a predicate that is true for concrete architectures (i.e. neither
# "source" nor "all").  NOTE(review): the enclosing "def" line is elided.
941 return (arch != "source" and arch != "all")
943 ################################################################################
945 def join_with_commas_and(list):
946 if len(list) == 0: return "nothing"
947 if len(list) == 1: return list[0]
948 return ", ".join(list[:-1]) + " and " + list[-1]
950 ################################################################################
# Interior of a dependency pretty-printer: each atom (pkg, version, constraint)
# becomes "pkg (constraint version)", and atoms are joined with " |".
# NOTE(review): the enclosing "def" line, the loop header and the
# unconstrained-atom branch are elided from this chunk.
955 (pkg, version, constraint) = atom
957 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
960 pp_deps.append(pp_dep)
961 return " |".join(pp_deps)
963 ################################################################################
968 ################################################################################
# Translate -s/-c/-a options into SQL "AND ... IN (...)" constraint fragments
# plus a check_source flag.  NOTE(review): several lines (list initialisations,
# the "source" handling, con_components default) are elided from this chunk.
970 def parse_args(Options):
971 """ Handle -a, -c and -s arguments; returns them as SQL constraints """
972 # XXX: This should go away and everything which calls it be converted
973 # to use SQLA properly. For now, we'll just fix it not to use
974 # the old Pg interface though
975 session = DBConn().session()
979 for suitename in split_args(Options["Suite"]):
980 suite = get_suite(suitename, session=session)
# NOTE(review): unlike the component check below, this dereferences
# suite.suite_id — if get_suite() returned None this raises AttributeError
# before the warn() fires; confirm against upstream.
981 if suite.suite_id is None:
982 warn("suite '%s' not recognised." % (suite.suite_name))
984 suite_ids_list.append(suite.suite_id)
986 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
988 fubar("No valid suite given.")
993 if Options["Component"]:
994 component_ids_list = []
995 for componentname in split_args(Options["Component"]):
996 component = get_component(componentname, session=session)
997 if component is None:
998 warn("component '%s' not recognised." % (componentname))
1000 component_ids_list.append(component.component_id)
1001 if component_ids_list:
1002 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
1004 fubar("No valid component given.")
1008 # Process architecture
1009 con_architectures = ""
1011 if Options["Architecture"]:
1013 for archname in split_args(Options["Architecture"]):
1014 if archname == "source":
1017 arch = get_architecture(archname, session=session)
1019 warn("architecture '%s' not recognised." % (archname))
1021 arch_ids_list.append(arch.arch_id)
1023 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
1025 if not check_source:
1026 fubar("No valid architecture given.")
1030 return (con_suites, con_architectures, con_components, check_source)
1032 ################################################################################
# Interior of an enhanced traceback printer: walks to the innermost frame and
# dumps each local variable of each frame after the normal traceback.
# NOTE(review): the enclosing "def" line, the frame-walk loop header and the
# try/except around printing values are elided from this chunk.
1034 # Inspired(tm) by Bryn Keller's print_exc_plus (See
1035 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
1038 tb = sys.exc_info()[2]
1045 frame = frame.f_back
1047 traceback.print_exc()
1049 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
1050 frame.f_code.co_filename,
1052 for key, value in frame.f_locals.items():
1053 print "\t%20s = " % key,
1057 print "<unable to print>"
1059 ################################################################################
# Run "function", dumping an extended traceback on failure (body elided from
# this chunk).
1061 def try_with_debug(function):
1069 ################################################################################
# cmp()-style comparator for architecture names where "source" sorts before
# everything else.  NOTE(review): the branches returning -1/1 and the normal
# comparison fallback are elided from this chunk.
1071 def arch_compare_sw (a, b):
1073 Function for use in sorting lists of architectures.
1075 Sorts normally except that 'source' dominates all others.
1078 if a == "source" and b == "source":
1087 ################################################################################
# Split a comma- or whitespace-separated argument string into a list; with
# dwim set, a trailing comma is treated as a probable typo and is fatal.
# NOTE(review): the whitespace-split return and the comma-split return are
# elided from this chunk.
1089 def split_args (s, dwim=1):
1091 Split command line arguments which can be separated by either commas
1092 or whitespace. If dwim is set, it will complain about string ending
1093 in comma since this usually means someone did 'dak ls -a i386, m68k
1094 foo' or something and the inevitable confusion resulting from 'm68k'
1095 being treated as an argument is undesirable.
1098 if s.find(",") == -1:
1101 if s[-1:] == "," and dwim:
1102 fubar("split_args: found trailing comma, spurious space maybe?")
1105 ################################################################################
def Dict(**kwargs):
    """Return the keyword arguments as a plain dict.

    The parameter was renamed from "dict" to avoid shadowing the builtin;
    callers are unaffected because ** collects keyword arguments regardless
    of the collector's name.
    """
    return kwargs
1109 ########################################
# Fork/exec a gpgv command via /bin/sh and capture stdout+stderr, the
# status-fd stream and the exit status, multiplexing reads with select().
# NOTE(review): this chunk elides many lines (the fork, fd closes in the
# child, the read loop structure, the more_data termination test); the
# visible statements are not contiguous.
1111 def gpgv_get_status_output(cmd, status_read, status_write):
1113 Our very own version of commands.getouputstatus(), hacked to support
1117 cmd = ['/bin/sh', '-c', cmd]
1118 p2cread, p2cwrite = os.pipe()
1119 c2pread, c2pwrite = os.pipe()
1120 errout, errin = os.pipe()
# Child: close every fd except the status pipe so gpgv's --status-fd survives.
1130 for i in range(3, 256):
1131 if i != status_write:
1137 os.execvp(cmd[0], cmd)
1143 os.dup2(c2pread, c2pwrite)
1144 os.dup2(errout, errin)
1146 output = status = ""
1148 i, o, e = select.select([c2pwrite, errin, status_read], [], [])
1151 r = os.read(fd, 8196)
1153 more_data.append(fd)
1154 if fd == c2pwrite or fd == errin:
1156 elif fd == status_read:
1159 fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
1161 pid, exit_status = os.waitpid(pid, 0)
1163 os.close(status_write)
1164 os.close(status_read)
1174 return output, status, exit_status
1176 ################################################################################
# Parse gpgv --status-fd output into a dict of {keyword: args}, accumulating
# a description of any malformed or duplicated lines in internal_error.
# NOTE(review): the keywords/internal_error initialisations, blank-line skip
# and the "args = split[2:]" line are elided from this chunk.
1178 def process_gpgv_output(status):
1179 # Process the status-fd output
1182 for line in status.split('\n'):
1186 split = line.split()
1188 internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
1190 (gnupg, keyword) = split[:2]
1191 if gnupg != "[GNUPG:]":
1192 internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
# Some status tokens legitimately repeat; everything else repeating is an error.
1195 if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
1196 internal_error += "found duplicate status token ('%s').\n" % (keyword)
1199 keywords[keyword] = args
1201 return (keywords, internal_error)
1203 ################################################################################
# Fetch the public key that signed "filename" from a keyserver into a keyring:
# run gpgv to learn the missing fingerprint (NO_PUBKEY), then gpg --recv-key.
# Returns an error string on failure; the success return is elided here.
# NOTE(review): the "if keyserver is None"/"if keyring is None" guard lines
# and the internal_error check are elided from this chunk.
1205 def retrieve_key (filename, keyserver=None, keyring=None):
1207 Retrieve the key that signed 'filename' from 'keyserver' and
1208 add it to 'keyring'. Returns nothing on success, or an error message
1212 # Defaults for keyserver and keyring
1214 keyserver = Cnf["Dinstall::KeyServer"]
1216 keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]
1218 # Ensure the filename contains no shell meta-characters or other badness
1219 if not re_taint_free.match(filename):
1220 return "%s: tainted filename" % (filename)
1222 # Invoke gpgv on the file
1223 status_read, status_write = os.pipe()
1224 cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1225 (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1227 # Process the status-fd output
1228 (keywords, internal_error) = process_gpgv_output(status)
1230 return internal_error
1232 if not keywords.has_key("NO_PUBKEY"):
1233 return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1235 fingerprint = keywords["NO_PUBKEY"][0]
1236 # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
1237 # it'll try to create a lockfile in /dev. A better solution might
1238 # be a tempfile or something.
1239 cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1240 % (Cnf["Dinstall::SigningKeyring"])
1241 cmd += " --keyring %s --keyserver %s --recv-key %s" \
1242 % (keyring, keyserver, fingerprint)
1243 (result, output) = commands.getstatusoutput(cmd)
1245 return "'%s' failed with exit code %s" % (cmd, result)
1249 ################################################################################
# Build a "--keyring X --keyring Y" argument string, defaulting to the
# configured Dinstall::GPGKeyring list.
# NOTE(review): the "if not keyrings:" guard before the default assignment is
# elided from this chunk.
1251 def gpg_keyring_args(keyrings=None):
1253 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1255 return " ".join(["--keyring %s" % x for x in keyrings])
1257 ################################################################################
def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None):
    """
    Check the signature of a file and return the fingerprint if the
    signature is valid, or None if it is not, together with a list of
    rejection messages.

    @type sig_filename: string
    @param sig_filename: the file whose signature should be checked

    @type data_filename: string
    @param data_filename: optional name of the file a detached signature
        applies to

    @type keyrings: list of strings (or None)
    @param keyrings: keyrings to use; defaults to Dinstall::GPGKeyring

    @type autofetch: boolean (or None)
    @param autofetch: whether to fetch unknown signing keys; if None, the
        default behaviour from the config (Dinstall::KeyAutoFetch) is used

    @rtype: tuple of (string or None, list of strings)
    @return: (fingerprint, rejection messages); fingerprint is None when
        the signature could not be validated
    """
    rejects = []

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(sig_filename):
        rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
        return (None, rejects)

    if data_filename and not re_taint_free.match(data_filename):
        rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
        return (None, rejects)

    if not keyrings:
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    # Autofetch the signing key if that's enabled
    if autofetch is None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    if autofetch:
        error_msg = retrieve_key(sig_filename)
        if error_msg:
            rejects.append(error_msg)
            return (None, rejects)

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    if internal_error:
        rejects.append("internal error while performing signature check on %s." % (sig_filename))
        # Bug fix: list.append() takes exactly one argument; the old
        # two-argument reject()-style calls raised TypeError here.
        rejects.append(internal_error)
        rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.")
        return (None, rejects)

    # Now check for obviously bad things in the processed output
    if "KEYREVOKED" in keywords:
        rejects.append("The key used to sign %s has been revoked." % (sig_filename))
    if "BADSIG" in keywords:
        rejects.append("bad signature on %s." % (sig_filename))
    if "ERRSIG" in keywords and "NO_PUBKEY" not in keywords:
        rejects.append("failed to check signature on %s." % (sig_filename))
    if "NO_PUBKEY" in keywords:
        args = keywords["NO_PUBKEY"]
        key = args[0] if len(args) >= 1 else "UNKNOWN"
        rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if "BADARMOR" in keywords:
        rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if "NODATA" in keywords:
        rejects.append("no signature found in %s." % (sig_filename))
    if "EXPKEYSIG" in keywords:
        args = keywords["EXPKEYSIG"]
        key = args[0] if len(args) >= 1 else "UNKNOWN"
        rejects.append("Signature made by expired key 0x%s" % (key))
    if "KEYEXPIRED" in keywords and "GOODSIG" not in keywords:
        args = keywords["KEYEXPIRED"]
        expiredate = ""
        if len(args) >= 1:
            timestamp = args[0]
            if timestamp.count("T") == 0:
                try:
                    # Epoch timestamp - render it as a date
                    expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
                except ValueError:
                    expiredate = "unknown (%s)" % (timestamp)
            else:
                # Already an ISO 8601-style timestamp; use it as-is
                expiredate = timestamp
        rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))

    if len(rejects) > 0:
        return (None, rejects)

    # Next check gpgv exited with a zero return code
    if exit_status:
        rejects.append("gpgv failed while checking %s." % (sig_filename))
        if status.strip():
            rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "))
        else:
            rejects.append(prefix_multi_line_string(output, " [GPG output:] "))
        return (None, rejects)

    # Sanity check the good stuff we expect
    if "VALIDSIG" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
    else:
        args = keywords["VALIDSIG"]
        if len(args) < 1:
            rejects.append("internal error while checking signature on %s." % (sig_filename))
        else:
            fingerprint = args[0]
    if "GOODSIG" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if "SIG_ID" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

    # Finally ensure there's not something we don't recognise
    known_keywords = frozenset(["VALIDSIG", "SIG_ID", "GOODSIG", "BADSIG", "ERRSIG",
                                "SIGEXPIRED", "KEYREVOKED", "NO_PUBKEY", "BADARMOR",
                                "NODATA", "NOTATION_DATA", "NOTATION_NAME", "KEYEXPIRED"])

    for keyword in keywords.keys():
        if keyword not in known_keywords:
            rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))

    if len(rejects) > 0:
        return (None, rejects)

    return (fingerprint, [])
1391 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """
    Retrieve email addresses from gpg key uids for a given fingerprint.

    Results are memoised in the module-level key_uid_email_cache, so gpg
    is invoked at most once per fingerprint.

    @rtype: set of strings
    @return: email addresses extracted from the key's uid lines (empty if
        gpg failed or no uids matched)
    """
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses is not None:
        return addresses
    addresses = set()
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
           % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    # Only parse the output if gpg succeeded; otherwise cache an empty set
    if result == 0:
        for l in output.split('\n'):
            m = re_gpg_uid.match(l)
            if m:
                addresses.add(m.group(1))
    key_uid_email_cache[fingerprint] = addresses
    return addresses
1410 ################################################################################
1412 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """
    Greedy word-wrap of 'paragraph' at 'max_length' characters.

    Continuation lines are separated by a newline followed by 'prefix'.
    Words longer than 'max_length' are emitted on a line of their own
    rather than being split.

    @rtype: string
    @return: the wrapped paragraph
    """
    s = ""
    line = ""
    have_started = 0
    words = paragraph.split()

    for word in words:
        word_size = len(word)
        if word_size > max_length:
            # Oversized word: flush any pending line, then put the word
            # on a line of its own.
            if have_started:
                s += line + '\n' + prefix
            s += word + '\n' + prefix
            line = ""
            have_started = 0
        elif have_started:
            new_length = len(line) + word_size + 1
            if new_length > max_length:
                # Word doesn't fit on the current line; start a new one.
                s += line + '\n' + prefix
                line = word
            else:
                line += ' ' + word
        else:
            line = word
            have_started = 1

    # Flush the final pending line (no trailing newline).
    if have_started:
        s += line

    return s
1443 ################################################################################
1445 def clean_symlink (src, dest, root):
1447 Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
1450 src = src.replace(root, '', 1)
1451 dest = dest.replace(root, '', 1)
1452 dest = os.path.dirname(dest)
1453 new_src = '../' * len(dest.split('/'))
1454 return new_src + src
1456 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Securely pre-create a unique temporary file.

    'directory', when given, is where the file is created; 'prefix'
    (default "dak") and 'suffix' shape the generated name.

    Returns the (fd, name) pair from tempfile.mkstemp.
    """
    (fd, pathname) = tempfile.mkstemp(suffix, prefix, directory)
    return (fd, pathname)
1470 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Securely pre-create a unique temporary directory.

    'parent', when given, is the directory to create it in; 'prefix'
    (default "dak") and 'suffix' shape the generated name.

    Returns the pathname of the new directory.
    """
    pathname = tempfile.mkdtemp(suffix, prefix, parent)
    return pathname
1484 ################################################################################
def is_email_alias(email):
    """
    Check whether the user part of 'email' is listed in the alias file.

    The alias file is read once and its user names memoised in the
    module-level alias_cache set.

    @rtype: boolean
    @return: True if the local part of the address is an alias
    """
    global alias_cache
    if alias_cache is None:
        aliasfn = which_alias_file()
        alias_cache = set()
        if aliasfn:
            # Alias file is colon-separated; the first field is the name.
            for l in open(aliasfn):
                alias_cache.add(l.split(':')[0])
    uid = email.split('@')[0]
    return uid in alias_cache
1498 ################################################################################
def get_changes_files(dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place)

    Returns a list of filenames
    """
    try:
        # Much of the rest of p-u/p-a depends on being in the right place
        os.chdir(dir)
        changes_files = [x for x in os.listdir(dir) if x.endswith('.changes')]
    except OSError as e:
        fubar("Failed to read list from directory %s (%s)" % (dir, e))

    return changes_files
1517 ################################################################################
# Module initialisation, executed at import time: load the default dak
# configuration (default_config, defined above) into the global apt_pkg
# configuration object 'Cnf'.
Cnf = apt_pkg.newConfiguration()
apt_pkg.ReadConfigFileISC(Cnf,default_config)

# Layer a host-specific configuration file on top of the defaults when one
# is in use.  NOTE(review): which_conf_file() is defined elsewhere in this
# file; presumably it consults the environment/hostname -- confirm.
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1527 ###############################################################################