2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
39 import email as modemail
42 from dbconn import DBConn, get_architecture, get_component, get_suite
43 from dak_exceptions import *
44 from textutils import fix_maintainer
45 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
46 re_multi_line_field, re_srchasver, re_taint_free, \
47 re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource, \
50 from formats import parse_format, validate_changes_format
51 from srcformats import get_format_from_string
52 from collections import defaultdict
54 ################################################################################
# Module-level configuration defaults and caches.
default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used

alias_cache = None #: Cache for email alias checks
key_uid_email_cache = {} #: Cache for email addresses from gpg key uids

# (hashname, function, earliest_changes_version)
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
                ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
# Monkeypatch commands.getstatusoutput as it may not return the correct exit
# code in lenny's Python. This also affects commands.getoutput and
def dak_getstatusoutput(cmd):
    # Run cmd through the shell with stderr merged into stdout and collect
    # all output, as a drop-in replacement for commands.getstatusoutput().
    pipe = subprocess.Popen(cmd, shell=True, universal_newlines=True,
                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

    output, _ = pipe.communicate()

    # Mirror commands.getstatusoutput(): strip one trailing newline.
    if output[-1:] == '\n':

# Install our replacement over the stdlib implementation.
commands.getstatusoutput = dak_getstatusoutput
85 ################################################################################
88 """ Escape html chars """
89 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
91 ################################################################################
def open_file(filename, mode='r'):
    """
    Open C{file}, return fileobject.

    @type filename: string
    @param filename: path/filename to open

    @param mode: open mode

    @return: open fileobject

    @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
    """
    f = open(filename, mode)
    raise CantOpenError, filename
115 ################################################################################
def our_raw_input(prompt=""):
    # raw_input() wrapper: write the prompt to stdout and read a line;
    # an EOF (^D) from the user is reported on stderr.
    sys.stdout.write(prompt)
    sys.stderr.write("\nUser interrupt (^D).\n")
133 ################################################################################
def extract_component_from_section(section):
    # Split a "component/section" style Section value and return the
    # (section, component) pair; default components are expanded via Cnf.
    if section.find('/') != -1:
        component = section.split('/')[0]

    # Expand default component
    if Cnf.has_key("Component::%s" % section):

    return (section, component)
150 ################################################################################
def parse_deb822(contents, signing_rules=0):
    """Parse deb822-style *contents* into a field-name -> value dict."""
    # Split the lines in the input, keeping the linebreaks.
    lines = contents.splitlines(True)
    raise ParseChangesError, "[Empty changes file]"

    # Reindex by line number so we can easily verify the format of
    indexed_lines[index] = line[:-1]

    num_of_lines = len(indexed_lines.keys())
    while index < num_of_lines:
        line = indexed_lines[index]
        if signing_rules == 1:
            # Strict mode: the PGP signature block must follow immediately.
            if index > num_of_lines:
                raise InvalidDscError, index
            line = indexed_lines[index]
            if not line.startswith("-----BEGIN PGP SIGNATURE"):
                raise InvalidDscError, index
        if line.startswith("-----BEGIN PGP SIGNATURE"):
        if line.startswith("-----BEGIN PGP SIGNED MESSAGE"):
            if signing_rules == 1:
                # Skip the PGP armor header data up to the first blank line.
                while index < num_of_lines and line != "":
                    line = indexed_lines[index]
        # If we're not inside the signed data, don't process anything
        if signing_rules >= 0 and not inside_signature:
        slf = re_single_line_field.match(line)
        field = slf.groups()[0].lower()
        changes[field] = slf.groups()[1]
        changes[field] += '\n'
        mlf = re_multi_line_field.match(line)
        raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
        if first == 1 and changes[field] != "":
            changes[field] += '\n'
        changes[field] += mlf.groups()[0] + '\n'

    if signing_rules == 1 and inside_signature:
        raise InvalidDscError, index

    # Keep a verbatim copy of the input for later use.
    changes["filecontents"] = "".join(lines)

    if changes.has_key("source"):
        # Strip the source version in brackets from the source field,
        # put it in the "source-version" field instead.
        srcver = re_srchasver.search(changes["source"])
        changes["source"] = srcver.group(1)
        changes["source-version"] = srcver.group(2)

    raise ParseChangesError, error
240 ################################################################################
def parse_changes(filename, signing_rules=0, dsc_file=0):
    """
    Parses a changes file and returns a dictionary where each field is a
    key. The mandatory first argument is the filename of the .changes
    signing_rules is an optional argument:

      - If signing_rules == -1, no signature is required.
      - If signing_rules == 0 (the default), a signature is required.
      - If signing_rules == 1, it turns on the same strict format checking

    The rules for (signing_rules == 1)-mode are:

      - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
        followed by any PGP header data and must end with a blank line.

      - The data section must end with a blank line and must be followed by
        "-----BEGIN PGP SIGNATURE-----".
    """
    changes_in = open_file(filename)
    content = changes_in.read()
    # Reject changes files whose bytes are not valid UTF-8.
    unicode(content, 'utf-8')
    raise ChangesUnicodeError, "Changes file not proper utf-8"
    changes = parse_deb822(content, signing_rules)

    # Finally ensure that everything needed for .changes is there
    must_keywords = ('Format', 'Date', 'Source', 'Binary', 'Architecture', 'Version',
                     'Distribution', 'Maintainer', 'Description', 'Changes', 'Files')
    for keyword in must_keywords:
        if not changes.has_key(keyword.lower()):
            missingfields.append(keyword)
    if len(missingfields):
        raise ParseChangesError, "Missing mandantory field(s) in changes file (policy 5.5): %s" % (missingfields)
289 ################################################################################
def hash_key(hashname):
    """Return the files-dict key under which the given hash is stored (e.g. "sha1" -> "sha1sum")."""
    return hashname + 'sum'
294 ################################################################################
def create_hash(where, files, hashname, hashfunc):
    """
    create_hash extends the passed files dict with the given hash by
    iterating over all files on disk and passing them to the hashing
    function.
    """
    for f in files.keys():
        file_handle = open_file(f)
        except CantOpenError:
            rejmsg.append("Could not open file %s for checksumming" % (f))

        # Store the computed digest under e.g. "sha1sum".
        files[f][hash_key(hashname)] = hashfunc(file_handle)
316 ################################################################################
def check_hash(where, files, hashname, hashfunc):
    """
    check_hash checks the given hash in the files dict against the actual
    files on disk. The hash values need to be present consistently in
    all file entries. It does not modify its input in any way.
    """
    for f in files.keys():
        file_handle = open_file(f)

        # Check for the hash entry, to not trigger a KeyError.
        if not files[f].has_key(hash_key(hashname)):
            rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,

        # Actually check the hash for correctness.
        if hashfunc(file_handle) != files[f][hash_key(hashname)]:
            rejmsg.append("%s: %s check failed in %s" % (f, hashname,
        except CantOpenError:
            # TODO: This happens when the file is in the pool.
            # warn("Cannot open file %s" % f)
351 ################################################################################
def check_size(where, files):
    """
    check_size checks the file sizes in the passed files dict against the
    on-disk sizes.
    """
    for f in files.keys():
        # TODO: This happens when the file is in the pool.
        actual_size = entry[stat.ST_SIZE]
        size = int(files[f]["size"])
        if size != actual_size:
            rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
                          % (f, actual_size, size, where))
376 ################################################################################
def check_dsc_files(dsc_filename, dsc=None, dsc_files=None):
    """
    Verify that the files listed in the Files field of the .dsc are
    those expected given the announced Format.

    @type dsc_filename: string
    @param dsc_filename: path of .dsc file

    @param dsc: the content of the .dsc parsed by C{parse_changes()}

    @type dsc_files: dict
    @param dsc_files: the file list returned by C{build_file_list()}

    @return: all errors detected
    """
    # Parse the file if needed
    dsc = parse_changes(dsc_filename, signing_rules=1, dsc_file=1);

    if dsc_files is None:
        dsc_files = build_file_list(dsc, is_a_dsc=1)

    # Ensure .dsc lists proper set of source files according to the format
    has = defaultdict(lambda: 0)

    # (filename-suffix regex, 'has' keys to bump) lookup entries.
    (r'orig.tar.gz', ('orig_tar_gz', 'orig_tar')),
    (r'diff.gz', ('debian_diff',)),
    (r'tar.gz', ('native_tar_gz', 'native_tar')),
    (r'debian\.tar\.(gz|bz2)', ('debian_tar',)),
    (r'orig\.tar\.(gz|bz2)', ('orig_tar',)),
    (r'tar\.(gz|bz2)', ('native_tar',)),
    (r'orig-.+\.tar\.(gz|bz2)', ('more_orig_tar',)),

    for f in dsc_files.keys():
        m = re_issource.match(f)
        rejmsg.append("%s: %s in Files field not recognised as source."

        # Populate 'has' dictionary by resolving keys in lookup table
        for regex, keys in ftype_lookup:
            if re.match(regex, m.group(3)):

        # File does not match anything in lookup table; reject
        reject("%s: unexpected source file '%s'" % (dsc_filename, f))

    # Check for multiple files
    for file_type in ('orig_tar', 'native_tar', 'debian_tar', 'debian_diff'):
        if has[file_type] > 1:
            rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))

    # Source format specific tests
    format = get_format_from_string(dsc['format'])
    '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)
    except UnknownFormatError:
        # Not an error here for now
456 ################################################################################
def check_hash_fields(what, manifest):
    """
    check_hash_fields ensures that there are no checksum fields in the
    given dict that we do not know about.
    """
    # Accepted hash names, e.g. ["sha1", "sha256"], from known_hashes.
    hashes = map(lambda x: x[0], known_hashes)
    for field in manifest:
        if field.startswith("checksums-"):
            hashname = field.split("-",1)[1]
            if hashname not in hashes:
                rejmsg.append("Unsupported checksum field for %s "\
                              "in %s" % (hashname, what))
474 ################################################################################
def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
    # Ensure the given hash is present/correct for every file listed in the
    # .changes; which path is taken depends on the changes Format version.
    if format >= version:
        # The version should contain the specified hash.

        # Import hashes from the changes
        rejmsg = parse_checksums(".changes", files, changes, hashname)

    # We need to calculate the hash because it can't possibly
    return func(".changes", files, hashname, hashfunc)
# We could add the orig which might be in the pool to the files dict to
# access the checksums easily.

def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
    """
    ensure_dsc_hashes' task is to ensure that each and every *present* hash
    in the dsc is correct, i.e. identical to the changes file and if necessary
    the pool. The latter task is delegated to check_hash.
    """
    if not dsc.has_key('Checksums-%s' % (hashname,)):

    # Import hashes from the dsc
    parse_checksums(".dsc", dsc_files, dsc, hashname)

    rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
510 ################################################################################
def parse_checksums(where, files, manifest, hashname):
    # Parse a "Checksums-<hash>" field of *manifest* (lines of
    # "checksum size filename") into the files dict; returns reject messages.
    field = 'checksums-%s' % hashname
    if not field in manifest:
    for line in manifest[field].split('\n'):
        clist = line.strip().split(' ')
        checksum, size, checkfile = clist
        rejmsg.append("Cannot parse checksum line [%s]" % (line))
        if not files.has_key(checkfile):
            # TODO: check for the file's entry in the original files dict, not
            # the one modified by (auto)byhand and other weird stuff
            # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
            # (file, hashname, where))
        if not files[checkfile]["size"] == size:
            rejmsg.append("%s: size differs for files and checksums-%s entry "\
                          "in %s" % (checkfile, hashname, where))
        files[checkfile][hash_key(hashname)] = checksum
    for f in files.keys():
        if not files[f].has_key(hash_key(hashname)):
            # NOTE(review): this message interpolates 'checkfile' (whatever
            # checksum line was parsed last) rather than the loop variable
            # 'f' -- looks like a bug; the reported filename can be wrong.
            rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
543 ################################################################################
# Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl

def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
    # Turn the given Files/Checksums-* field of *changes* into a dict of
    # per-filename entries (size, section, priority, component, hash).
    # Make sure we have a Files: field to parse...
    if not changes.has_key(field):
        raise NoFilesFieldError

    # Validate .changes Format: field
    validate_changes_format(parse_format(changes['format']), field)

    # .dsc Files entries carry no section/priority columns.
    includes_section = (not is_a_dsc) and field == "files"

    # Parse each entry/line:
    for i in changes[field].split('\n'):
        section = priority = ""
        (md5, size, section, priority, name) = s
        (md5, size, name) = s
        raise ParseChangesError, i

        (section, component) = extract_component_from_section(section)

        files[name] = dict(size=size, section=section,
                           priority=priority, component=component)
        files[name][hashname] = md5
587 ################################################################################
def send_mail (message, filename=""):
    """sendmail wrapper, takes _either_ a message string or a file as arguments"""

    # Check whether we're supposed to be sending mail
    if Cnf.has_key("Dinstall::Options::No-Mail") and Cnf["Dinstall::Options::No-Mail"]:

    # If we've been passed a string dump it into a temporary file
    (fd, filename) = tempfile.mkstemp()
    os.write (fd, message)

    # Optionally filter the recipient headers against a whitelist file.
    if Cnf.has_key("Dinstall::MailWhiteList") and \
       Cnf["Dinstall::MailWhiteList"] != "":
        message_in = open_file(filename)
        message_raw = modemail.message_from_file(message_in)

        whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
        for line in whitelist_in:
            if not re_whitespace_comment.match(line):
                if re_re_mark.match(line):
                    # Lines carrying the RE mark are regexes themselves.
                    whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
                whitelist.append(re.compile(re.escape(line.strip())))

        # Walk over recipient headers, keeping only whitelisted addresses.
        fields = ["To", "Bcc", "Cc"]
        value = message_raw.get(field, None)
        for item in value.split(","):
            (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
        if not mail_whitelisted:
            print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])

        # Doesn't have any mail in whitelist so remove the header
        del message_raw[field]
        message_raw.replace_header(field, ', '.join(match))

        # Change message fields in order if we don't have a To header
        if not message_raw.has_key("To"):
            if message_raw.has_key(field):
                message_raw[fields[-1]] = message_raw[field]
                del message_raw[field]

            # Clean up any temporary files
            # and return, as we removed all recipients.
            os.unlink (filename);

        fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
        os.write (fd, message_raw.as_string(True));

    # Hand the (possibly rewritten) file to the configured sendmail command.
    (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
    raise SendmailFailedError, output

    # Clean up any temporary files
673 ################################################################################
def poolify (source, component):
    # Return the pool subdirectory for *source* within *component*;
    # "lib*" packages are bucketed by their first four characters,
    # everything else by the first character (Debian pool layout).
    if source[:3] == "lib":
        return component + source[:4] + '/' + source + '/'
    return component + source[:1] + '/' + source + '/'
683 ################################################################################
def move (src, dest, overwrite = 0, perms = 0664):
    # Move *src* to *dest*, creating the destination directory (setgid,
    # group-writable) if needed; refuses to clobber unless *overwrite*.
    if os.path.exists(dest) and os.path.isdir(dest):
    dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Moving %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        # Moving into a directory: keep the source basename.
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
        fubar("Can't move %s to %s - file already exists." % (src, dest))
        if not os.access(dest, os.W_OK):
            fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
def copy (src, dest, overwrite = 0, perms = 0664):
    # Copy *src* to *dest*; same rules as move() above but raises
    # FileExistsError/CantOverwriteError instead of aborting via fubar().
    if os.path.exists(dest) and os.path.isdir(dest):
    dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Copying %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        # Copying into a directory: keep the source basename.
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
        raise FileExistsError
        if not os.access(dest, os.W_OK):
            raise CantOverwriteError
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
730 ################################################################################
733 res = socket.getfqdn()
734 database_hostname = Cnf.get("Config::" + res + "::DatabaseHostname")
735 if database_hostname:
736 return database_hostname
def which_conf_file ():
    # Decide which dak config file to use: $DAK_CONFIG wins, then an
    # optional local config, then the host-specific Cnf entry, then the
    # compiled-in default.
    if os.getenv('DAK_CONFIG'):
        return os.getenv('DAK_CONFIG')

    res = socket.getfqdn()
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # NOTE(review): os.path.join discards earlier components when a
        # later one is absolute, so confpath is always "/etc/dak.conf"
        # and homedir is ignored -- likely "etc/dak.conf" was intended.
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            apt_pkg.ReadConfigFileISC(Cnf,default_config)

    # We are still in here, so there is no local config file or we do
    # not allow local files. Do the normal stuff.
    if Cnf.get("Config::" + res + "::DakConfig"):
        return Cnf["Config::" + res + "::DakConfig"]

    return default_config
def which_apt_conf_file ():
    # Same lookup strategy as which_conf_file(), but for the apt config.
    res = socket.getfqdn()
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # NOTE(review): as in which_conf_file(), the absolute second
        # argument makes os.path.join ignore homedir entirely.
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            apt_pkg.ReadConfigFileISC(Cnf,default_config)

    if Cnf.get("Config::" + res + "::AptConfig"):
        return Cnf["Config::" + res + "::AptConfig"]

    return default_apt_config
def which_alias_file():
    # Return the host-specific forward-alias file, if present on disk.
    hostname = socket.getfqdn()
    aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
    if os.path.exists(aliasfn):
781 ################################################################################
def TemplateSubst(subst_map, filename):
    """ Perform a substitution of template: replace each key of subst_map
    found in the template file with str() of its value. """
    templatefile = open_file(filename)
    template = templatefile.read()
    for k, v in subst_map.iteritems():
        template = template.replace(k, str(v))
792 ################################################################################
def fubar(msg, exit_code=1):
    # Report a fatal error on stderr (with the "E:" prefix).
    sys.stderr.write("E: %s\n" % (msg))
799 sys.stderr.write("W: %s\n" % (msg))
801 ################################################################################
803 # Returns the user name with a laughable attempt at rfc822 conformancy
804 # (read: removing stray periods).
806 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
809 return pwd.getpwuid(os.getuid())[0]
811 ################################################################################
821 return ("%d%s" % (c, t))
823 ################################################################################
def cc_fix_changes (changes):
    # Normalise the Architecture field from a space-separated string
    # into a dict keyed by architecture name (value 1).
    o = changes.get("architecture", "")
    del changes["architecture"]
    changes["architecture"] = {}
    changes["architecture"][j] = 1
def changes_compare (a, b):
    """ Sort by source name, source version, 'have source', and then by filename """
    a_changes = parse_changes(a)
    b_changes = parse_changes(b)

    cc_fix_changes (a_changes)
    cc_fix_changes (b_changes)

    # Sort by source name
    a_source = a_changes.get("source")
    b_source = b_changes.get("source")
    q = cmp (a_source, b_source)

    # Sort by source version
    a_version = a_changes.get("version", "0")
    b_version = b_changes.get("version", "0")
    q = apt_pkg.VersionCompare(a_version, b_version)

    # Sort by 'have source'
    a_has_source = a_changes["architecture"].get("source")
    b_has_source = b_changes["architecture"].get("source")
    if a_has_source and not b_has_source:
    elif b_has_source and not a_has_source:

    # Fall back to sort by filename
873 ################################################################################
def find_next_free (dest, too_many=100):
    # Find a non-existing filename by appending '.1', '.2', ... to dest;
    # give up after too_many attempts.
    while os.path.exists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
    if extra >= too_many:
        raise NoFreeFilenameError
885 ################################################################################
def result_join (original, sep = '\t'):
    # Join *original* with *sep*, mapping None entries to the empty string.
    for i in xrange(len(original)):
        if original[i] == None:
            resultlist.append("")
        resultlist.append(original[i])
    return sep.join(resultlist)
896 ################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    # Prefix every line of *str* with *prefix*; blank lines are dropped
    # unless include_blank_lines is set.
    for line in str.split('\n'):
        if line or include_blank_lines:
            out += "%s%s\n" % (prefix, line)
    # Strip trailing new line
909 ################################################################################
def validate_changes_file_arg(filename, require_changes=1):
    """
    'filename' is either a .changes or .dak file. If 'filename' is a
    .dak file, it's changed to be the corresponding .changes file. The
    function then checks if the .changes file a) exists and b) is
    readable and returns the .changes filename if so. If there's a
    problem, the next action depends on the option 'require_changes'

      - If 'require_changes' == -1, errors are ignored and the .changes
        filename is returned.
      - If 'require_changes' == 0, a warning is given and 'None' is returned.
      - If 'require_changes' == 1, a fatal error is raised.
    """
    orig_filename = filename
    if filename.endswith(".dak"):
        # Map foo.dak onto the sibling foo.changes.
        filename = filename[:-4]+".changes"

    if not filename.endswith(".changes"):
        error = "invalid file type; not a changes file"
    if not os.access(filename,os.R_OK):
        if os.path.exists(filename):
            error = "permission denied"
        error = "file not found"

    if require_changes == 1:
        fubar("%s: %s." % (orig_filename, error))
    elif require_changes == 0:
        warn("Skipping %s - %s" % (orig_filename, error))
    else: # We only care about the .dak file
952 ################################################################################
955 return (arch != "source" and arch != "all")
957 ################################################################################
def join_with_commas_and(list):
    """Render the entries of *list* as an English enumeration, e.g. "a, b and c"."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    # All but the last item are comma-separated; the last is joined by "and".
    return "%s and %s" % (", ".join(list[:-1]), list[-1])
964 ################################################################################
969 (pkg, version, constraint) = atom
971 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
974 pp_deps.append(pp_dep)
975 return " |".join(pp_deps)
977 ################################################################################
982 ################################################################################
def parse_args(Options):
    """ Handle -a, -c and -s arguments; returns them as SQL constraints """
    # XXX: This should go away and everything which calls it be converted
    # to use SQLA properly. For now, we'll just fix it not to use
    # the old Pg interface though
    session = DBConn().session()

    # Process suite
    for suitename in split_args(Options["Suite"]):
        suite = get_suite(suitename, session=session)
        if suite.suite_id is None:
            warn("suite '%s' not recognised." % (suite.suite_name))
        suite_ids_list.append(suite.suite_id)
    con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
    fubar("No valid suite given.")

    # Process component
    if Options["Component"]:
        component_ids_list = []
        for componentname in split_args(Options["Component"]):
            component = get_component(componentname, session=session)
            if component is None:
                warn("component '%s' not recognised." % (componentname))
            component_ids_list.append(component.component_id)
        if component_ids_list:
            con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
        fubar("No valid component given.")

    # Process architecture
    con_architectures = ""
    if Options["Architecture"]:
        for archname in split_args(Options["Architecture"]):
            # "source" is special-cased: it is not a binary architecture.
            if archname == "source":
            arch = get_architecture(archname, session=session)
            warn("architecture '%s' not recognised." % (archname))
            arch_ids_list.append(arch.arch_id)
        con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
        if not check_source:
            fubar("No valid architecture given.")

    return (con_suites, con_architectures, con_components, check_source)
1046 ################################################################################
1048 # Inspired(tm) by Bryn Keller's print_exc_plus (See
1049 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
1052 tb = sys.exc_info()[2]
1059 frame = frame.f_back
1061 traceback.print_exc()
1063 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
1064 frame.f_code.co_filename,
1066 for key, value in frame.f_locals.items():
1067 print "\t%20s = " % key,
1071 print "<unable to print>"
1073 ################################################################################
1075 def try_with_debug(function):
1083 ################################################################################
def arch_compare_sw (a, b):
    """
    Function for use in sorting lists of architectures.

    Sorts normally except that 'source' dominates all others.
    """
    if a == "source" and b == "source":
1101 ################################################################################
def split_args (s, dwim=1):
    """
    Split command line arguments which can be separated by either commas
    or whitespace. If dwim is set, it will complain about string ending
    in comma since this usually means someone did 'dak ls -a i386, m68k
    foo' or something and the inevitable confusion resulting from 'm68k'
    being treated as an argument is undesirable.
    """
    if s.find(",") == -1:
    if s[-1:] == "," and dwim:
        fubar("split_args: found trailing comma, spurious space maybe?")
1119 ################################################################################
def gpgv_get_status_output(cmd, status_read, status_write):
    """
    Our very own version of commands.getouputstatus(), hacked to support
    gpgv's status fd.
    """
    cmd = ['/bin/sh', '-c', cmd]
    p2cread, p2cwrite = os.pipe()
    c2pread, c2pwrite = os.pipe()
    errout, errin = os.pipe()

    # Close all inherited fds except the status pipe before exec'ing.
    for i in range(3, 256):
        if i != status_write:
    os.execvp(cmd[0], cmd)

    os.dup2(c2pread, c2pwrite)
    os.dup2(errout, errin)

    output = status = ""
    # Multiplex stdout/stderr and gpgv's status fd until drained.
    i, o, e = select.select([c2pwrite, errin, status_read], [], [])
    r = os.read(fd, 8196)
    more_data.append(fd)
    if fd == c2pwrite or fd == errin:
    elif fd == status_read:
    fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))

    # Reap the child and collect its exit status.
    pid, exit_status = os.waitpid(pid, 0)

    os.close(status_write)
    os.close(status_read)

    return output, status, exit_status
1186 ################################################################################
def process_gpgv_output(status):
    # Process the status-fd output
    # Turn gpgv's "[GNUPG:] KEYWORD args..." lines into a keyword -> args
    # dict; malformed lines are accumulated into internal_error.
    for line in status.split('\n'):
        split = line.split()
        internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
        # A few status tokens may legitimately appear more than once.
        if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
        keywords[keyword] = args

    return (keywords, internal_error)
1213 ################################################################################
def retrieve_key (filename, keyserver=None, keyring=None):
    """
    Retrieve the key that signed 'filename' from 'keyserver' and
    add it to 'keyring'. Returns nothing on success, or an error message
    string on failure.
    """
    # Defaults for keyserver and keyring
    keyserver = Cnf["Dinstall::KeyServer"]
    keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(filename):
        return "%s: tainted filename" % (filename)

    # Invoke gpgv on the file
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
    (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)
    return internal_error

    # With an empty keyring, gpgv must report NO_PUBKEY with the key id.
    if not keywords.has_key("NO_PUBKEY"):
        return "didn't find expected NO_PUBKEY in gpgv status-fd output"

    fingerprint = keywords["NO_PUBKEY"][0]
    # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
    # it'll try to create a lockfile in /dev. A better solution might
    # be a tempfile or something.
    cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
          % (Cnf["Dinstall::SigningKeyring"])
    cmd += " --keyring %s --keyserver %s --recv-key %s" \
           % (keyring, keyserver, fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    return "'%s' failed with exit code %s" % (cmd, result)
1259 ################################################################################
def gpg_keyring_args(keyrings=None):
    # Build a space-separated "--keyring X" argument string, defaulting
    # to the keyrings configured under Dinstall::GPGKeyring.
    keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
    return " ".join(["--keyring %s" % x for x in keyrings])
1267 ################################################################################
def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None):
    """
    Check the signature of a file and return the fingerprint if the
    signature is valid or 'None' if it's not.  The first argument is the
    filename whose signature should be checked.  The second argument is a
    reject function and is called when an error is found.  The reject()
    function must allow for two arguments: the first is the error message,
    the second is an optional prefix string.  It's possible for reject()
    to be called more than once during an invocation of check_signature().
    The third argument is optional and is the name of the files the
    detached signature applies to.  The fourth argument is optional and is
    a *list* of keyrings to use.  'autofetch' can either be None, True or
    False.  If None, the default behaviour specified in the config will be
    """

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(sig_filename):
        rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
        return (None, rejects)

    if data_filename and not re_taint_free.match(data_filename):
        rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
        return (None, rejects)

    # NOTE(review): presumably this fallback should only apply when the
    # caller passed no keyrings -- confirm the guard against upstream.
    keyrings = Cnf.ValueList("Dinstall::GPGKeyring")

    # Autofetch the signing key if that's enabled
    if autofetch == None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
        error_msg = retrieve_key(sig_filename)
        rejects.append(error_msg)
        return (None, rejects)

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
        rejects.append("internal error while performing signature check on %s." % (sig_filename))
        # FIXME(review): list.append() takes exactly one argument; these
        # two-argument calls look like leftovers from the old
        # reject(msg, prefix) interface and would raise TypeError if hit.
        rejects.append(internal_error, "")
        rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.", "")
        return (None, rejects)

    # Now check for obviously bad things in the processed output
    if keywords.has_key("KEYREVOKED"):
        rejects.append("The key used to sign %s has been revoked." % (sig_filename))
    if keywords.has_key("BADSIG"):
        rejects.append("bad signature on %s." % (sig_filename))
    if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
        rejects.append("failed to check signature on %s." % (sig_filename))
    if keywords.has_key("NO_PUBKEY"):
        args = keywords["NO_PUBKEY"]
            rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if keywords.has_key("BADARMOR"):
        rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if keywords.has_key("NODATA"):
        rejects.append("no signature found in %s." % (sig_filename))
    if keywords.has_key("EXPKEYSIG"):
        args = keywords["EXPKEYSIG"]
            rejects.append("Signature made by expired key 0x%s" % (key))
    if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
        args = keywords["KEYEXPIRED"]
            # A bare epoch timestamp (no "T") is rendered as YYYY-MM-DD;
            # anything else is passed through verbatim.
            if timestamp.count("T") == 0:
                    expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
                    expiredate = "unknown (%s)" % (timestamp)
                expiredate = timestamp
        rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))

    if len(rejects) > 0:
        return (None, rejects)

    # Next check gpgv exited with a zero return code
        rejects.append("gpgv failed while checking %s." % (sig_filename))
            rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "))
            rejects.append(prefix_multi_line_string(output, " [GPG output:] "))
        return (None, rejects)

    # Sanity check the good stuff we expect
    if not keywords.has_key("VALIDSIG"):
        rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
        args = keywords["VALIDSIG"]
            rejects.append("internal error while checking signature on %s." % (sig_filename))
            # First VALIDSIG argument is the fingerprint we return on success.
            fingerprint = args[0]
    if not keywords.has_key("GOODSIG"):
        rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if not keywords.has_key("SIG_ID"):
        rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

    # Finally ensure there's not something we don't recognise
    known_keywords = dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
                          NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="",POLICY_URL="")

    for keyword in keywords.keys():
        if not known_keywords.has_key(keyword):
            rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))

    if len(rejects) > 0:
        return (None, rejects)

    return (fingerprint, [])
1401 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """Retrieve email addresses from gpg key uids for a given fingerprint."""
    # Serve repeat lookups from the module-level cache.
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses != None:
    # Ask gpg for the key and scrape the email addresses from its uid lines.
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
          % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
        for l in output.split('\n'):
            m = re_gpg_uid.match(l)
                addresses.add(m.group(1))
    # Remember the result so the gpg subprocess only runs once per key.
    key_uid_email_cache[fingerprint] = addresses
1420 ################################################################################
1422 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """Word-wrap 'paragraph' to at most 'max_length' characters per line,
    with wrapped continuation lines preceded by 'prefix'."""
    # Wrap greedily, one whitespace-separated word at a time.
    words = paragraph.split()
        word_size = len(word)
        # A word longer than the limit is flushed onto a line of its own.
        if word_size > max_length:
                s += line + '\n' + prefix
            s += word + '\n' + prefix
            # Would appending this word (plus a joining space) overflow?
            new_length = len(line) + word_size + 1
            if new_length > max_length:
                s += line + '\n' + prefix
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.

    Returns the fixed 'src' path, i.e. one '../' per directory level of
    'dest' below 'root', followed by the root-relative 'src'.
    """
    # Strip the archive root prefix from both paths (first occurrence only).
    src = src.replace(root, '', 1)
    dest = dest.replace(root, '', 1)
    # The link lives in dest's directory: climb out of it one '../' per level.
    dest = os.path.dirname(dest)
    new_src = '../' * len(dest.split('/'))
    return new_src + src
1466 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.
    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pair (fd, name); the caller is responsible for closing the
    file descriptor and removing the file when done.
    """
    # mkstemp creates the file with mode 0600, so the name cannot be raced.
    return tempfile.mkstemp(suffix, prefix, directory)
1480 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Return a secure and unique directory by pre-creating it.
    If 'parent' is non-null, it will be the directory the directory is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pathname to the new directory; the caller is responsible
    for removing it when done.
    """
    # mkdtemp creates the directory with mode 0700, readable only by us.
    return tempfile.mkdtemp(suffix, prefix, parent)
1494 ################################################################################
def is_email_alias(email):
    """ checks if the user part of the email is listed in the alias file """
    # Populate the module-level cache from the alias file on first use.
    if alias_cache == None:
        aliasfn = which_alias_file()
            # NOTE(review): alias file lines look like "name: target";
            # only the name part before the colon is cached.
            for l in open(aliasfn):
                alias_cache.add(l.split(':')[0])
    # Compare only the local part (before '@') against the cached aliases.
    uid = email.split('@')[0]
    return uid in alias_cache
1508 ################################################################################
def get_changes_files(from_dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place)

    Returns a list of filenames
    """
        # Much of the rest of p-u/p-a depends on being in the right place
        changes_files = [x for x in os.listdir(from_dir) if x.endswith('.changes')]
        # NOTE(review): 'e' appears to be the exception bound by an
        # except clause around the directory access -- confirm upstream.
        fubar("Failed to read list from directory %s (%s)" % (from_dir, e))

    return changes_files
1527 ################################################################################
# Module import-time initialisation: build the global apt configuration
# object that the helpers above (check_signature, gpg_keyring_args, ...) read.
Cnf = apt_pkg.newConfiguration()
# Setting DAK_TEST in the environment skips reading the default config,
# so test runs are not affected by the host's /etc/dak/dak.conf.
if not os.getenv("DAK_TEST"):
    apt_pkg.ReadConfigFileISC(Cnf,default_config)
# A host-specific config file (if different from the default) is layered
# on top of whatever was read above.
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())