2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
39 import email as modemail
42 from dbconn import DBConn, get_architecture, get_component, get_suite, get_override_type, Keyring, session_wrapper
43 from dak_exceptions import *
44 from gpg import SignedFile
45 from textutils import fix_maintainer
46 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
47 re_multi_line_field, re_srchasver, re_taint_free, \
48 re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource, \
51 from formats import parse_format, validate_changes_format
52 from srcformats import get_format_from_string
53 from collections import defaultdict
################################################################################

# Default locations of the dak and apt configuration files; per-host
# overrides are applied on top of these (see which_conf_file below).
default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used

alias_cache = None #: Cache for email alias checks
key_uid_email_cache = {} #: Cache for email addresses from gpg key uids

# (hashname, function, earliest_changes_version)
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
                ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
# Monkeypatch commands.getstatusoutput as it may not return the correct exit
# code in lenny's Python. This also affects commands.getoutput and
def dak_getstatusoutput(cmd):
    # Run cmd through the shell, merging stderr into stdout.
    pipe = subprocess.Popen(cmd, shell=True, universal_newlines=True,
                            stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

    output = pipe.stdout.read()

    # NOTE(review): this function is fragmentary in this view — the lines
    # collecting the child's exit status, trimming the trailing newline and
    # returning (status, output) appear to be missing.
    if output[-1:] == '\n':

commands.getstatusoutput = dak_getstatusoutput
88 ################################################################################
def html_escape(s):
    """ Escape html chars """
    # Replace each character matched by re_html_escaping with its entity
    # from the html_escaping map (both imported from regexes above).
    return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
94 ################################################################################
def open_file(filename, mode='r'):
    """
    Open C{file}, return fileobject.

    @type filename: string
    @param filename: path/filename to open

    @type mode: string
    @param mode: open mode

    @rtype: fileobject
    @return: open fileobject

    @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
    """
    try:
        f = open(filename, mode)
    except IOError:
        # Re-raise as the project-level exception callers catch.
        raise CantOpenError(filename)
    return f
118 ################################################################################
def our_raw_input(prompt=""):
    # Prompt on stdout and read a line from the user.
    # NOTE(review): fragmentary in this view — the actual input call and the
    # surrounding try/except (the EOF handler below belongs to it) are missing.
    sys.stdout.write(prompt)
    sys.stderr.write("\nUser interrupt (^D).\n")
136 ################################################################################
def extract_component_from_section(section):
    # Split a "component/section" value into its parts; plain sections get a
    # default component.
    # NOTE(review): fragmentary in this view — the component default/expansion
    # branches are partially missing.
    if section.find('/') != -1:
        component = section.split('/')[0]

    # Expand default component
    if Cnf.has_key("Component::%s" % section):

    return (section, component)
153 ################################################################################
def parse_deb822(armored_contents, signing_rules=0, keyrings=None, session=None):
    # Parse a deb822 (RFC822-style) stanza after verifying any inline PGP
    # signature via SignedFile; returns a dict of lower-cased field names.
    # NOTE(review): heavily fragmentary in this view — initialization of
    # 'changes', the signing_rules == -1 guard, loop bookkeeping and several
    # conditionals are missing; only visible lines are annotated below.
    require_signature = True
    require_signature = False

    signed_file = SignedFile(armored_contents, keyrings=keyrings, require_signature=require_signature)
    contents = signed_file.contents

    # Split the lines in the input, keeping the linebreaks.
    lines = contents.splitlines(True)
    raise ParseChangesError, "[Empty changes file]"

    # Reindex by line number so we can easily verify the format of
    indexed_lines[index] = line[:-1]

    num_of_lines = len(indexed_lines.keys())
    while index < num_of_lines:
        line = indexed_lines[index]
        # Strict (dpkg-source-like) checking: a blank line may only end the stanza.
        if line == "" and signing_rules == 1:
            if index != num_of_lines:
                raise InvalidDscError, index
        slf = re_single_line_field.match(line)
        field = slf.groups()[0].lower()
        changes[field] = slf.groups()[1]
        changes[field] += '\n'
        mlf = re_multi_line_field.match(line)
        raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
        if first == 1 and changes[field] != "":
            changes[field] += '\n'
        changes[field] += mlf.groups()[0] + '\n'

    changes["filecontents"] = armored_contents

    if changes.has_key("source"):
        # Strip the source version in brackets from the source field,
        # put it in the "source-version" field instead.
        srcver = re_srchasver.search(changes["source"])
        changes["source"] = srcver.group(1)
        changes["source-version"] = srcver.group(2)

    raise ParseChangesError, error
226 ################################################################################
def parse_changes(filename, signing_rules=0, dsc_file=0, keyrings=None):
    """
    Parses a changes file and returns a dictionary where each field is a
    key. The mandatory first argument is the filename of the .changes
    file.

    signing_rules is an optional argument:

    - If signing_rules == -1, no signature is required.
    - If signing_rules == 0 (the default), a signature is required.
    - If signing_rules == 1, it turns on the same strict format checking
      as dpkg-source.

    The rules for (signing_rules == 1)-mode are:

    - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
      followed by any PGP header data and must end with a blank line.

    - The data section must end with a blank line and must be followed by
      "-----BEGIN PGP SIGNATURE-----".
    """
    # NOTE(review): fragmentary in this view — the try/except around the
    # unicode check, the missingfields init and the dsc_file branch are missing.
    changes_in = open_file(filename)
    content = changes_in.read()
    # Validate that the changes file is proper UTF-8.
    unicode(content, 'utf-8')
    raise ChangesUnicodeError, "Changes file not proper utf-8"
    changes = parse_deb822(content, signing_rules, keyrings=keyrings)

    # Finally ensure that everything needed for .changes is there
    must_keywords = ('Format', 'Date', 'Source', 'Binary', 'Architecture', 'Version',
                     'Distribution', 'Maintainer', 'Description', 'Changes', 'Files')

    for keyword in must_keywords:
        if not changes.has_key(keyword.lower()):
            missingfields.append(keyword)

    if len(missingfields):
        # NOTE(review): "mandantory" is a typo for "mandatory" in this
        # user-visible message.
        raise ParseChangesError, "Missing mandantory field(s) in changes file (policy 5.5): %s" % (missingfields)
275 ################################################################################
def hash_key(hashname):
    """Return the files-dict key under which a hash is stored, e.g. 'sha1' -> 'sha1sum'."""
    return "{0}sum".format(hashname)
280 ################################################################################
def create_hash(where, files, hashname, hashfunc):
    """
    create_hash extends the passed files dict with the given hash by
    iterating over all files on disk and passing them to the hashing
    function given.
    """
    # NOTE(review): fragmentary in this view — the rejmsg initialization, the
    # 'try:' matching the except below, the continue and the final return
    # appear to be missing.
    for f in files.keys():
        file_handle = open_file(f)
    except CantOpenError:
        rejmsg.append("Could not open file %s for checksumming" % (f))

        files[f][hash_key(hashname)] = hashfunc(file_handle)
302 ################################################################################
def check_hash(where, files, hashname, hashfunc):
    """
    check_hash checks the given hash in the files dict against the actual
    files on disk. The hash values need to be present consistently in
    all file entries. It does not modify its input in any way.
    """
    # NOTE(review): fragmentary in this view — rejmsg init, the 'try:'
    # matching the except below, the message continuation lines and the
    # final return are missing.
    for f in files.keys():
        file_handle = open_file(f)

        # Check for the hash entry, to not trigger a KeyError.
        if not files[f].has_key(hash_key(hashname)):
            rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,

        # Actually check the hash for correctness.
        if hashfunc(file_handle) != files[f][hash_key(hashname)]:
            rejmsg.append("%s: %s check failed in %s" % (f, hashname,
    except CantOpenError:
        # TODO: This happens when the file is in the pool.
        # warn("Cannot open file %s" % f)
337 ################################################################################
def check_size(where, files):
    """
    check_size checks the file sizes in the passed files dict against the
    files on disk.
    """
    # NOTE(review): fragmentary in this view — rejmsg init, the os.stat call
    # producing 'entry' (a stat tuple, per the stat.ST_SIZE index below), its
    # try/except and the final return are missing.
    for f in files.keys():
        # TODO: This happens when the file is in the pool.
        actual_size = entry[stat.ST_SIZE]
        size = int(files[f]["size"])
        if size != actual_size:
            rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
                          % (f, actual_size, size, where))
362 ################################################################################
def check_dsc_files(dsc_filename, dsc=None, dsc_files=None):
    """
    Verify that the files listed in the Files field of the .dsc are
    those expected given the announced Format.

    @type dsc_filename: string
    @param dsc_filename: path of .dsc file

    @type dsc: dict
    @param dsc: the content of the .dsc parsed by C{parse_changes()}

    @type dsc_files: dict
    @param dsc_files: the file list returned by C{build_file_list()}

    @rtype: list
    @return: all errors detected
    """
    # NOTE(review): fragmentary in this view — rejmsg init, the 'if dsc is
    # None:' guard, the assignment opening the lookup-table tuple, and several
    # loop-control lines are missing.
    # Parse the file if needed
    dsc = parse_changes(dsc_filename, signing_rules=1, dsc_file=1);

    if dsc_files is None:
        dsc_files = build_file_list(dsc, is_a_dsc=1)

    # Ensure .dsc lists proper set of source files according to the format
    has = defaultdict(lambda: 0)

    # (filename-suffix regex, 'has' keys it sets) — assignment line missing.
        (r'orig.tar.gz', ('orig_tar_gz', 'orig_tar')),
        (r'diff.gz', ('debian_diff',)),
        (r'tar.gz', ('native_tar_gz', 'native_tar')),
        (r'debian\.tar\.(gz|bz2|xz)', ('debian_tar',)),
        (r'orig\.tar\.(gz|bz2|xz)', ('orig_tar',)),
        (r'tar\.(gz|bz2|xz)', ('native_tar',)),
        (r'orig-.+\.tar\.(gz|bz2|xz)', ('more_orig_tar',)),

    for f in dsc_files.keys():
        m = re_issource.match(f)
        rejmsg.append("%s: %s in Files field not recognised as source."

        # Populate 'has' dictionary by resolving keys in lookup table
        for regex, keys in ftype_lookup:
            if re.match(regex, m.group(3)):

            # File does not match anything in lookup table; reject
            reject("%s: unexpected source file '%s'" % (dsc_filename, f))

    # Check for multiple files
    for file_type in ('orig_tar', 'native_tar', 'debian_tar', 'debian_diff'):
        if has[file_type] > 1:
            rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))

    # Source format specific tests
    format = get_format_from_string(dsc['format'])
    '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)
    except UnknownFormatError:
        # Not an error here for now
442 ################################################################################
def check_hash_fields(what, manifest):
    """
    check_hash_fields ensures that there are no checksum fields in the
    given dict that we do not know about.
    """
    # NOTE(review): fragmentary in this view — rejmsg initialization and the
    # final return appear to be missing.
    hashes = map(lambda x: x[0], known_hashes)
    for field in manifest:
        if field.startswith("checksums-"):
            hashname = field.split("-",1)[1]
            if hashname not in hashes:
                rejmsg.append("Unsupported checksum field for %s "\
                    "in %s" % (hashname, what))
460 ################################################################################
def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
    # Ensure the given hash is present for all files in a .changes, importing
    # it from the Checksums field when the format is new enough, otherwise
    # computing it from disk.
    # NOTE(review): fragmentary in this view — the else branch and the
    # definition of 'func' are missing.
    if format >= version:
        # The version should contain the specified hash.

        # Import hashes from the changes
        rejmsg = parse_checksums(".changes", files, changes, hashname)

    # We need to calculate the hash because it can't possibly
    return func(".changes", files, hashname, hashfunc)
# We could add the orig which might be in the pool to the files dict to
# access the checksums easily.

def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
    """
    ensure_dsc_hashes' task is to ensure that each and every *present* hash
    in the dsc is correct, i.e. identical to the changes file and if necessary
    the pool. The latter task is delegated to check_hash.
    """
    # NOTE(review): fragmentary in this view — rejmsg init, the early return
    # for absent Checksums fields, and the final return are missing.
    if not dsc.has_key('Checksums-%s' % (hashname,)):

    # Import hashes from the dsc
    parse_checksums(".dsc", dsc_files, dsc, hashname)

    rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
496 ################################################################################
def parse_checksums(where, files, manifest, hashname):
    # Parse a 'Checksums-<hash>' field from 'manifest' and record each file's
    # checksum into 'files'; returns accumulated rejection messages.
    # NOTE(review): fragmentary in this view — rejmsg init, the early return,
    # the try/except around tuple unpacking and several continue lines are
    # missing.
    field = 'checksums-%s' % hashname
    if not field in manifest:
    for line in manifest[field].split('\n'):
        clist = line.strip().split(' ')
        checksum, size, checkfile = clist
        rejmsg.append("Cannot parse checksum line [%s]" % (line))
        if not files.has_key(checkfile):
            # TODO: check for the file's entry in the original files dict, not
            # the one modified by (auto)byhand and other weird stuff
            # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
            # (file, hashname, where))
        if not files[checkfile]["size"] == size:
            rejmsg.append("%s: size differs for files and checksums-%s entry "\
                "in %s" % (checkfile, hashname, where))
        files[checkfile][hash_key(hashname)] = checksum
    for f in files.keys():
        if not files[f].has_key(hash_key(hashname)):
            # NOTE(review): this message interpolates 'checkfile' (leftover
            # from the previous loop) rather than 'f' — looks like a bug;
            # confirm against upstream.
            rejmsg.append("%s: no entry in checksums-%s in %s" % (checkfile,
529 ################################################################################
# Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl

def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
    # Build a dict of per-file entries (size/section/priority/component/hash)
    # from the given parsed changes/dsc field.
    # NOTE(review): fragmentary in this view — the 'files' dict init, the
    # is_a_dsc format checks, the split producing 's', and the final return
    # are missing.
    # Make sure we have a Files: field to parse...
    if not changes.has_key(field):
        raise NoFilesFieldError

    # Validate .changes Format: field
    validate_changes_format(parse_format(changes['format']), field)

    includes_section = (not is_a_dsc) and field == "files"

    # Parse each entry/line:
    for i in changes[field].split('\n'):
        section = priority = ""
        (md5, size, section, priority, name) = s
        (md5, size, name) = s
        raise ParseChangesError, i

        (section, component) = extract_component_from_section(section)

        files[name] = dict(size=size, section=section,
                           priority=priority, component=component)
        files[name][hashname] = md5
573 ################################################################################
# see http://bugs.debian.org/619131
def build_package_set(dsc, session = None):
    # Build a dict describing the binary/source packages named in the dsc's
    # Package-Set field.
    # NOTE(review): fragmentary in this view — the 'packages' dict init, the
    # early return, the default package_type, and the return are missing.
    if not dsc.has_key("package-set"):
    for line in dsc["package-set"].split("\n"):
        (name, section, priority) = line.split()
        (section, component) = extract_component_from_section(section)

        # A "type:name" prefix overrides the package type.
        if name.find(":") != -1:
            (package_type, name) = name.split(":", 1)
            if package_type == "src":

        # Validate type if we have a session
        if session and get_override_type(package_type, session) is None:
            # Maybe just warn and ignore? exit(1) might be a bit hard...
            utils.fubar("invalid type (%s) in Package-Set." % (package_type))

        if package_type == "dsc":
        if not packages.has_key(name) or packages[name]["type"] == "dsc":
            packages[name] = dict(priority=priority, section=section, type=package_type, component=component, files=[])
613 ################################################################################
def send_mail (message, filename=""):
    """sendmail wrapper, takes _either_ a message string or a file as arguments"""
    # NOTE(review): heavily fragmentary in this view — several guards, the
    # whitelist/match bookkeeping and cleanup lines are missing; annotations
    # below cover only visible code.
    # Check whether we're supposed to be sending mail
    if Cnf.has_key("Dinstall::Options::No-Mail") and Cnf["Dinstall::Options::No-Mail"]:

    # If we've been passed a string dump it into a temporary file
    (fd, filename) = tempfile.mkstemp()
    os.write (fd, message)

    # If we have a whitelist, filter the recipient headers against it.
    if Cnf.has_key("Dinstall::MailWhiteList") and \
       Cnf["Dinstall::MailWhiteList"] != "":
        message_in = open_file(filename)
        message_raw = modemail.message_from_file(message_in)

        whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
        for line in whitelist_in:
            if not re_whitespace_comment.match(line):
                if re_re_mark.match(line):
                    # Lines marked as regexes are compiled as-is ...
                    whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
                # ... everything else is matched literally.
                whitelist.append(re.compile(re.escape(line.strip())))

        # Walk over the recipient headers, keeping only whitelisted addresses.
        fields = ["To", "Bcc", "Cc"]
        value = message_raw.get(field, None)
        for item in value.split(","):
            (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
        if not mail_whitelisted:
            print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])

        # Doesn't have any mail in whitelist so remove the header
        del message_raw[field]
        message_raw.replace_header(field, ', '.join(match))

        # Change message fields in order if we don't have a To header
        if not message_raw.has_key("To"):
            if message_raw.has_key(field):
                message_raw[fields[-1]] = message_raw[field]
                del message_raw[field]

        # Clean up any temporary files
        # and return, as we removed all recipients.
        os.unlink (filename);

        fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
        os.write (fd, message_raw.as_string(True));

    # Invoke sendmail (path comes from Dinstall::SendmailCommand).
    (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
    raise SendmailFailedError, output

    # Clean up any temporary files
699 ################################################################################
def poolify (source, component):
    """
    Return the pool subdirectory for *source* within *component*,
    e.g. ("bar", "main") -> "main/b/bar/".  Library packages are pooled
    under a four-character prefix: ("libfoo", "main") -> "main/libf/libfoo/".
    """
    # A non-empty component needs a trailing slash before the prefix; the
    # visible fragment concatenated it bare, yielding e.g. "mainb/bar/".
    if component:
        component += '/'
    if source[:3] == "lib":
        return component + source[:4] + '/' + source + '/'
    else:
        return component + source[:1] + '/' + source + '/'
709 ################################################################################
def move (src, dest, overwrite = 0, perms = 0664):
    # Move src to dest (copy + chmod, original then removes src), creating the
    # destination directory if needed.
    # NOTE(review): fragmentary in this view — the umask restore, the
    # 'overwrite' guard structure and the final unlink of src are missing.
    if os.path.exists(dest) and os.path.isdir(dest):
    dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Moving %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
        fubar("Can't move %s to %s - file already exists." % (src, dest))
        if not os.access(dest, os.W_OK):
            fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
def copy (src, dest, overwrite = 0, perms = 0664):
    # Copy src to dest with the given permissions, creating the destination
    # directory if needed.
    # NOTE(review): fragmentary in this view — the umask restore and the
    # 'overwrite' guard structure are missing.
    if os.path.exists(dest) and os.path.isdir(dest):
    dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Copying %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
        raise FileExistsError
        if not os.access(dest, os.W_OK):
            raise CantOverwriteError
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
756 ################################################################################
def where_am_i ():
    """
    Return this host's identity: the configured
    Config::<fqdn>::DatabaseHostname if set, otherwise the FQDN itself.
    """
    # The visible fragment lacked the 'def' header (leaving a top-level
    # 'return') and the fallback return of the FQDN.
    res = socket.getfqdn()
    database_hostname = Cnf.get("Config::" + res + "::DatabaseHostname")
    if database_hostname:
        return database_hostname
    else:
        return res
def which_conf_file ():
    # Return the dak configuration file to use: $DAK_CONFIG wins, then a
    # per-host override from Config::<fqdn>::DakConfig, else default_config.
    # NOTE(review): fragmentary in this view — at least one line after the
    # local-config read appears to be missing.
    if os.getenv('DAK_CONFIG'):
        return os.getenv('DAK_CONFIG')

    res = socket.getfqdn()
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # NOTE(review): os.path.join discards 'homedir' because the second
        # argument is absolute — this always yields "/etc/dak.conf"; confirm
        # whether a relative ".etc/dak.conf"-style path was intended.
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            apt_pkg.ReadConfigFileISC(Cnf,default_config)

    # We are still in here, so there is no local config file or we do
    # not allow local files. Do the normal stuff.
    if Cnf.get("Config::" + res + "::DakConfig"):
        return Cnf["Config::" + res + "::DakConfig"]

    return default_config
def which_apt_conf_file ():
    # Return the apt configuration file to use: per-host override from
    # Config::<fqdn>::AptConfig, else default_apt_config.
    res = socket.getfqdn()
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # NOTE(review): os.path.join discards 'homedir' because the second
        # argument is absolute — this always yields "/etc/dak.conf"; confirm.
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            apt_pkg.ReadConfigFileISC(Cnf,default_config)

    if Cnf.get("Config::" + res + "::AptConfig"):
        return Cnf["Config::" + res + "::AptConfig"]

    return default_apt_config
def which_alias_file():
    """
    Return the path of this host's forward-alias file, or None if it
    does not exist.
    """
    # The visible fragment ended in an 'if' with no body; the two returns
    # are restored here.
    hostname = socket.getfqdn()
    aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
    if os.path.exists(aliasfn):
        return aliasfn
    else:
        return None
807 ################################################################################
def TemplateSubst(subst_map, filename):
    """ Perform a substition of template """
    # Read the template and replace every key of subst_map with the string
    # form of its value.  The visible fragment never returned the result
    # (the function would always yield None); the return is restored here.
    templatefile = open_file(filename)
    template = templatefile.read()
    for k, v in subst_map.iteritems():
        template = template.replace(k, str(v))
    templatefile.close()
    return template
818 ################################################################################
def fubar(msg, exit_code=1):
    """Print an error message to stderr and terminate with exit_code."""
    sys.stderr.write("E: %s\n" % (msg))
    sys.exit(exit_code)

def warn(msg):
    """Print a warning message to stderr."""
    # warn() is called elsewhere in this file (e.g. validate_changes_file_arg,
    # parse_args); its 'def' header was missing from the fragment.
    sys.stderr.write("W: %s\n" % (msg))
827 ################################################################################
# Returns the user name with a laughable attempt at rfc822 conformancy
# (read: removing stray periods).
# NOTE(review): the 'def' headers of the two one-line functions below are
# missing from this view, leaving orphaned returns.  The first returns the
# GECOS full name (pw_gecos), the second the login name (pw_name).
return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
return pwd.getpwuid(os.getuid())[0]
837 ################################################################################
# NOTE(review): orphaned return — the enclosing function (which evidently
# formats a count 'c' with a unit suffix 't') is missing from this view.
return ("%d%s" % (c, t))
849 ################################################################################
def cc_fix_changes (changes):
    # Normalise the 'architecture' field from a string into a dict mapping
    # each architecture to 1 (set membership).
    # NOTE(review): fragmentary in this view — the guard and the loop header
    # producing 'j' are missing.
    o = changes.get("architecture", "")
    del changes["architecture"]
    changes["architecture"] = {}
    changes["architecture"][j] = 1
def changes_compare (a, b):
    """ Sort by source name, source version, 'have source', and then by filename """
    # NOTE(review): fragmentary in this view — the try/except wrappers around
    # parse_changes, the early 'return q' exits after each comparison, and the
    # final filename comparison are missing.
    a_changes = parse_changes(a)
    b_changes = parse_changes(b)

    cc_fix_changes (a_changes)
    cc_fix_changes (b_changes)

    # Sort by source name
    a_source = a_changes.get("source")
    b_source = b_changes.get("source")
    q = cmp (a_source, b_source)

    # Sort by source version
    a_version = a_changes.get("version", "0")
    b_version = b_changes.get("version", "0")
    q = apt_pkg.VersionCompare(a_version, b_version)

    # Sort by 'have source'
    a_has_source = a_changes["architecture"].get("source")
    b_has_source = b_changes["architecture"].get("source")
    if a_has_source and not b_has_source:
    elif b_has_source and not a_has_source:

    # Fall back to sort by filename
899 ################################################################################
def find_next_free (dest, too_many=100):
    """
    Find a free filename based on *dest*: while *dest* exists, try
    dest.0, dest.1, ... for up to *too_many* attempts.

    @raise NoFreeFilenameError: if no free filename was found
    """
    # The visible fragment never initialized or incremented 'extra' and had
    # no return; restored here.
    extra = 0
    orig_dest = dest
    while os.path.exists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
        extra += 1
    if extra >= too_many:
        raise NoFreeFilenameError
    return dest
911 ################################################################################
def result_join (original, sep = '\t'):
    """
    Join the elements of *original* into a single *sep*-separated string,
    rendering None elements as the empty string.
    """
    # The visible fragment never initialized 'resultlist' (NameError) and
    # used the Python-2-only xrange plus '== None'; rewritten as a single
    # join over a comprehension (works on both Python 2 and 3).
    return sep.join([("" if entry is None else entry) for entry in original])
922 ################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    """
    Prefix each (stripped) line of *str* with *prefix*.  Blank lines are
    dropped unless include_blank_lines is set.
    """
    # The visible fragment lacked the accumulator initialization, the
    # per-line strip and the final return; restored here.
    out = ""
    for line in str.split('\n'):
        line = line.strip()
        if line or include_blank_lines:
            out += "%s%s\n" % (prefix, line)
    # Strip trailing new line
    if out:
        out = out[:-1]
    return out
935 ################################################################################
def validate_changes_file_arg(filename, require_changes=1):
    """
    'filename' is either a .changes or .dak file. If 'filename' is a
    .dak file, it's changed to be the corresponding .changes file. The
    function then checks if the .changes file a) exists and b) is
    readable and returns the .changes filename if so. If there's a
    problem, the next action depends on the option 'require_changes'
    argument:

    - If 'require_changes' == -1, errors are ignored and the .changes
      filename is returned.
    - If 'require_changes' == 0, a warning is given and 'None' is returned.
    - If 'require_changes' == 1, a fatal error is raised.
    """
    # NOTE(review): fragmentary in this view — the 'error' initialization,
    # the success return and the require_changes == -1 return are missing.
    orig_filename = filename
    if filename.endswith(".dak"):
        filename = filename[:-4]+".changes"

    if not filename.endswith(".changes"):
        error = "invalid file type; not a changes file"

    if not os.access(filename,os.R_OK):
        if os.path.exists(filename):
            error = "permission denied"
        error = "file not found"

    if require_changes == 1:
        fubar("%s: %s." % (orig_filename, error))
    elif require_changes == 0:
        warn("Skipping %s - %s" % (orig_filename, error))
    else: # We only care about the .dak file
978 ################################################################################
# NOTE(review): orphaned return — the enclosing def (a predicate true only
# for real binary architectures, i.e. not "source" or "all") is missing from
# this view.
return (arch != "source" and arch != "all")
983 ################################################################################
def join_with_commas_and(list):
    """Render *list* as an English enumeration: [] -> "nothing", ["a","b","c"] -> "a, b and c"."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    head, last = list[:-1], list[-1]
    return "%s and %s" % (", ".join(head), last)
990 ################################################################################
# NOTE(review): fragmentary — the enclosing def (pretty-prints a dependency
# list as "pkg (op ver) | ..."), the loop header, the 'pp_deps' init and the
# unversioned-dependency branch are missing from this view.
(pkg, version, constraint) = atom
pp_dep = "%s (%s %s)" % (pkg, constraint, version)
pp_deps.append(pp_dep)
return " |".join(pp_deps)
1003 ################################################################################
1008 ################################################################################
def parse_args(Options):
    """ Handle -a, -c and -s arguments; returns them as SQL constraints """
    # XXX: This should go away and everything which calls it be converted
    # to use SQLA properly. For now, we'll just fix it not to use
    # the old Pg interface though
    # NOTE(review): fragmentary in this view — suite/arch list inits,
    # 'check_source' handling and several else branches are missing.
    session = DBConn().session()

    # Process suite
    if Options["Suite"]:
        for suitename in split_args(Options["Suite"]):
            suite = get_suite(suitename, session=session)
            if suite.suite_id is None:
                warn("suite '%s' not recognised." % (suite.suite_name))
            suite_ids_list.append(suite.suite_id)
        con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
        fubar("No valid suite given.")

    # Process component
    if Options["Component"]:
        component_ids_list = []
        for componentname in split_args(Options["Component"]):
            component = get_component(componentname, session=session)
            if component is None:
                warn("component '%s' not recognised." % (componentname))
            component_ids_list.append(component.component_id)
        if component_ids_list:
            con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
        fubar("No valid component given.")

    # Process architecture
    con_architectures = ""
    if Options["Architecture"]:
        for archname in split_args(Options["Architecture"]):
            if archname == "source":
            arch = get_architecture(archname, session=session)
            warn("architecture '%s' not recognised." % (archname))
            arch_ids_list.append(arch.arch_id)
        con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
        if not check_source:
            fubar("No valid architecture given.")

    return (con_suites, con_architectures, con_components, check_source)
1072 ################################################################################
# Inspired(tm) by Bryn Keller's print_exc_plus (See
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
# NOTE(review): fragmentary — the enclosing def, the stack-walking loop and
# the try/except around printing locals are missing from this view.
tb = sys.exc_info()[2]
frame = frame.f_back
traceback.print_exc()
print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
                                       frame.f_code.co_filename,
for key, value in frame.f_locals.items():
    print "\t%20s = " % key,
    print "<unable to print>"
1099 ################################################################################
def try_with_debug(function):
    # NOTE(review): the body of this function is missing from this view;
    # presumably it calls function() and prints extended debug information on
    # exception — confirm against upstream before relying on it.
1109 ################################################################################
def arch_compare_sw (a, b):
    """
    Function for use in sorting lists of architectures.

    Sorts normally except that 'source' dominates all others.
    """
    # NOTE(review): fragmentary in this view — the remaining comparison
    # branches (source-vs-other and the normal comparison) are missing.
    if a == "source" and b == "source":
1127 ################################################################################
def split_args (s, dwim=1):
    """
    Split command line arguments which can be separated by either commas
    or whitespace. If dwim is set, it will complain about string ending
    in comma since this usually means someone did 'dak ls -a i386, m68k
    foo' or something and the inevitable confusion resulting from 'm68k'
    being treated as an argument is undesirable.
    """
    # NOTE(review): fragmentary in this view — the whitespace-split return
    # (no-comma case) and the comma-split return are missing.
    if s.find(",") == -1:
    if s[-1:] == "," and dwim:
        fubar("split_args: found trailing comma, spurious space maybe?")
1145 ################################################################################
def gpgv_get_status_output(cmd, status_read, status_write):
    """
    Our very own version of commands.getouputstatus(), hacked to support
    gpgv's status fd.
    """
    # NOTE(review): fragmentary in this view — the fork, several close/dup2
    # calls and the select loop's control structure are missing; annotations
    # cover only visible code.
    cmd = ['/bin/sh', '-c', cmd]
    p2cread, p2cwrite = os.pipe()
    c2pread, c2pwrite = os.pipe()
    errout, errin = os.pipe()

    # Child side: close every fd except the gpgv status fd, then exec.
    for i in range(3, 256):
        if i != status_write:
    os.execvp(cmd[0], cmd)

    # Parent side: collect output, stderr and status-fd data via select.
    os.dup2(c2pread, c2pwrite)
    os.dup2(errout, errin)

    output = status = ""
    i, o, e = select.select([c2pwrite, errin, status_read], [], [])
    r = os.read(fd, 8196)
    more_data.append(fd)
    if fd == c2pwrite or fd == errin:
    elif fd == status_read:
    fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))

    pid, exit_status = os.waitpid(pid, 0)

    os.close(status_write)
    os.close(status_read)
    return output, status, exit_status
1212 ################################################################################
def process_gpgv_output(status):
    # Parse gpgv --status-fd output into a dict of keyword -> args, plus an
    # accumulated internal-error string.
    # NOTE(review): fragmentary in this view — the keywords/internal_error
    # initialization, the 'args' extraction and several continue lines are
    # missing.
    # Process the status-fd output
    for line in status.split('\n'):
        split = line.split()
        internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
        # Some tokens legitimately repeat; everything else repeating is an error.
        if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
        keywords[keyword] = args

    return (keywords, internal_error)
1239 ################################################################################
def retrieve_key (filename, keyserver=None, keyring=None):
    """
    Retrieve the key that signed 'filename' from 'keyserver' and
    add it to 'keyring'. Returns nothing on success, or an error message
    on failure.
    """
    # NOTE(review): fragmentary in this view — the 'if keyserver is None:'-
    # style default guards, the internal_error test and the success return
    # are missing.
    # Defaults for keyserver and keyring
    keyserver = Cnf["Dinstall::KeyServer"]
    keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(filename):
        return "%s: tainted filename" % (filename)

    # Invoke gpgv on the file
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
    (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)
    return internal_error

    if not keywords.has_key("NO_PUBKEY"):
        return "didn't find expected NO_PUBKEY in gpgv status-fd output"

    fingerprint = keywords["NO_PUBKEY"][0]
    # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
    # it'll try to create a lockfile in /dev. A better solution might
    # be a tempfile or something.
    cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
        % (Cnf["Dinstall::SigningKeyring"])
    cmd += " --keyring %s --keyserver %s --recv-key %s" \
        % (keyring, keyserver, fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    return "'%s' failed with exit code %s" % (cmd, result)
1285 ################################################################################
def gpg_keyring_args(keyrings=None):
    """
    Return a gpg argument string of "--keyring <path>" pairs for the given
    keyrings, defaulting to the configured Dinstall::GPGKeyring list.
    """
    # The visible code unconditionally overwrote 'keyrings', making the
    # parameter dead; only fall back to the config when none were supplied.
    if not keyrings:
        keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
    return " ".join(["--keyring %s" % x for x in keyrings])
1293 ################################################################################
def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None, session=None):
    """
    Check the signature of a file and return the fingerprint if the
    signature is valid or 'None' if it's not.  The first argument is the
    filename whose signature should be checked.  The second argument is
    optional and is the name of the file a detached signature applies
    to.  The third argument is optional and is a *list* of keyrings to
    use; if not given, the active keyrings from the database are used.
    'autofetch' can either be None, True or False.  If None, the default
    behaviour specified in the config will be used.

    Returns a tuple (fingerprint, rejects).  On success 'rejects' is an
    empty list; on failure 'fingerprint' is None and 'rejects' contains
    one error message per problem found.  (The docstring used to
    describe an obsolete reject() callback API.)
    """

    rejects = []

    # Both filenames end up on a gpgv command line -- refuse any shell
    # meta-characters or other badness.
    if not re_taint_free.match(sig_filename):
        rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
        return (None, rejects)

    if data_filename and not re_taint_free.match(data_filename):
        rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
        return (None, rejects)

    if not keyrings:
        keyrings = [ x.keyring_name for x in session.query(Keyring).filter(Keyring.active == True).all() ]

    # Autofetch the signing key if that's enabled
    if autofetch is None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    if autofetch:
        error_msg = retrieve_key(sig_filename)
        if error_msg:
            rejects.append(error_msg)
            return (None, rejects)

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    if internal_error:
        rejects.append("internal error while performing signature check on %s." % (sig_filename))
        # Bug fix: these two appends used to be called with a second ""
        # argument (a leftover from the old reject() callback API), which
        # made list.append raise TypeError.
        rejects.append(internal_error)
        rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.")
        return (None, rejects)

    # Now check for obviously bad things in the processed output
    if "KEYREVOKED" in keywords:
        rejects.append("The key used to sign %s has been revoked." % (sig_filename))
    if "BADSIG" in keywords:
        rejects.append("bad signature on %s." % (sig_filename))
    if "ERRSIG" in keywords and "NO_PUBKEY" not in keywords:
        rejects.append("failed to check signature on %s." % (sig_filename))
    if "NO_PUBKEY" in keywords:
        args = keywords["NO_PUBKEY"]
        key = "UNKNOWN"
        if len(args) >= 1:
            key = args[0]
        rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if "BADARMOR" in keywords:
        rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if "NODATA" in keywords:
        rejects.append("no signature found in %s." % (sig_filename))
    if "EXPKEYSIG" in keywords:
        args = keywords["EXPKEYSIG"]
        key = "UNKNOWN"
        if len(args) >= 1:
            key = args[0]
        rejects.append("Signature made by expired key 0x%s" % (key))
    if "KEYEXPIRED" in keywords and "GOODSIG" not in keywords:
        args = keywords["KEYEXPIRED"]
        expiredate = ""
        if len(args) >= 1:
            timestamp = args[0]
            if timestamp.count("T") == 0:
                # Plain epoch timestamp; newer gpg emits ISO8601 (with a
                # 'T') instead, which we pass through unchanged below.
                try:
                    expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
                except ValueError:
                    expiredate = "unknown (%s)" % (timestamp)
            else:
                expiredate = timestamp
        rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))

    if len(rejects) > 0:
        return (None, rejects)

    # Next check gpgv exited with a zero return code
    if exit_status:
        rejects.append("gpgv failed while checking %s." % (sig_filename))
        if status.strip():
            rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "))
        else:
            rejects.append(prefix_multi_line_string(output, " [GPG output:] "))
        return (None, rejects)

    # Sanity check the good stuff we expect
    if "VALIDSIG" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
    else:
        args = keywords["VALIDSIG"]
        if len(args) < 1:
            rejects.append("internal error while checking signature on %s." % (sig_filename))
        else:
            fingerprint = args[0]
    if "GOODSIG" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if "SIG_ID" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

    # Finally ensure there's not something we don't recognise
    known_keywords = dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
                          NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="",POLICY_URL="")

    for keyword in keywords.keys():
        if keyword not in known_keywords:
            rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))

    if len(rejects) > 0:
        return (None, rejects)

    # 'rejects' being empty here implies VALIDSIG was present with an
    # argument, so 'fingerprint' is guaranteed to be bound.
    return (fingerprint, [])
1427 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """Retrieve email addresses from gpg key uids for a given fingerprint."""
    cached = key_uid_email_cache.get(fingerprint)
    if cached is not None:
        return cached

    addresses = set()
    gpg_cmd = "gpg --no-default-keyring %s --fingerprint %s" \
              % (gpg_keyring_args(), fingerprint)
    (status, output) = commands.getstatusoutput(gpg_cmd)
    if status == 0:
        # Pull the address out of every "uid" line gpg printed.
        for uid_line in output.split('\n'):
            match = re_gpg_uid.match(uid_line)
            if match:
                addresses.add(match.group(1))

    # Cache the result (even an empty set) so gpg is only run once per
    # fingerprint.
    key_uid_email_cache[fingerprint] = addresses
    return addresses
1446 ################################################################################
1448 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """Word-wrap 'paragraph' at 'max_length' columns.

    Continuation lines are prepended with 'prefix'.  Words longer than
    'max_length' are emitted on a line of their own rather than split.
    """
    out = ""
    line = ""
    have_started = 0

    for word in paragraph.split():
        wlen = len(word)
        if wlen > max_length:
            # Oversized word: flush anything pending, then give the word
            # its own line.
            if have_started:
                out += line + '\n' + prefix
            out += word + '\n' + prefix
        else:
            if have_started:
                if len(line) + wlen + 1 > max_length:
                    # Word doesn't fit: flush and start a fresh line.
                    out += line + '\n' + prefix
                    line = word
                else:
                    line += ' ' + word
            else:
                line = word
        have_started = 1

    if have_started:
        out += line

    return out
1479 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
    Returns fixed 'src'.
    """
    # Strip the archive root off both paths (first occurrence only).
    rel_src = src.replace(root, '', 1)
    rel_dest_dir = os.path.dirname(dest.replace(root, '', 1))
    # One '../' hop for every directory component of the link location.
    hops = '../' * len(rel_dest_dir.split('/'))
    return hops + rel_src
1492 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.

    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pair (fd, name).
    """
    fd, name = tempfile.mkstemp(suffix, prefix, directory)
    return (fd, name)
1506 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Return a secure and unique directory by pre-creating it.

    If 'parent' is non-null, it will be the directory the directory is pre-created in.
    If 'prefix' is non-null, the directory name will be prefixed with it, default is dak.
    If 'suffix' is non-null, the directory name will end with it.

    Returns a pathname to the new directory.
    """
    path = tempfile.mkdtemp(suffix, prefix, parent)
    return path
1520 ################################################################################
def is_email_alias(email):
    """Check whether the user part of 'email' is listed in the alias file.

    The alias file is parsed once and cached in the module-level
    'alias_cache' set; subsequent calls are a plain set lookup.
    """
    global alias_cache
    if alias_cache is None:  # bug-ish idiom fix: was '== None'
        aliasfn = which_alias_file()
        alias_cache = set()
        if aliasfn:
            # Alias file lines look like "name: target"; only the name
            # matters.  Use a context manager so the file handle is
            # closed (the old code leaked it).
            with open(aliasfn) as aliasfile:
                for l in aliasfile:
                    alias_cache.add(l.split(':')[0])
    uid = email.split('@')[0]
    return uid in alias_cache
1534 ################################################################################
def get_changes_files(from_dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place)

    Returns a list of filenames
    """
    try:
        # Much of the rest of p-u/p-a depends on being in the right place
        os.chdir(from_dir)
        entries = os.listdir(from_dir)
        changes_files = [entry for entry in entries if entry.endswith('.changes')]
    except OSError as e:
        fubar("Failed to read list from directory %s (%s)" % (from_dir, e))

    return changes_files
1553 ################################################################################
# Global apt_pkg configuration object used throughout this module
# (e.g. Cnf["Dinstall::KeyServer"]).  Populated from the default dak
# config unless we are running under the test suite (DAK_TEST set),
# then overlaid with the host-specific config file if one is in use.
Cnf = apt_pkg.newConfiguration()
if not os.getenv("DAK_TEST"):
    apt_pkg.ReadConfigFileISC(Cnf,default_config)

# NOTE(review): which_conf_file() is called twice here; presumably cheap —
# confirm before refactoring.
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1564 ################################################################################
def parse_wnpp_bug_file(file = "/srv/ftp-master.debian.org/scripts/masterfiles/wnpp_rm"):
    """
    Parses the wnpp bug list available at http://qa.debian.org/data/bts/wnpp_rm
    Well, actually it parsed a local copy, but let's document the source
    somewhere ;)

    returns a dict associating source package name with a list of open wnpp
    bugs (Yes, there might be more than one)
    """

    try:
        # Context manager so the file handle is closed (the old code
        # leaked it).
        with open(file) as f:
            lines = f.readlines()
    except IOError:
        # Single-argument print() is valid in both python2 and python3.
        print("Warning: Couldn't open %s; don't know about WNPP bugs, so won't close any." % file)
        lines = []

    wnpp = {}

    # Interesting lines look like "srcpkg: BUG1|BUG2|...".
    for line in lines:
        splited_line = line.split(": ", 1)
        if len(splited_line) > 1:
            wnpp[splited_line[0]] = splited_line[1].split("|")

    # Reduce each entry to just the bug numbers.
    for source in wnpp.keys():
        bugs = []
        for wnpp_bug in wnpp[source]:
            # Raw-string pattern; also guard against entries with no
            # digits at all (the old unconditional .group() call raised
            # AttributeError on them).
            match = re.search(r"\d+", wnpp_bug)
            if match:
                bugs.append(match.group())
        wnpp[source] = bugs

    return wnpp
1599 ################################################################################
1601 def get_packages_from_ftp(root, suite, component, architecture):
1603 Returns an object containing apt_pkg-parseable data collected by
1604 aggregating Packages.gz files gathered for each architecture.
1607 @param root: path to ftp archive root directory
1610 @param suite: suite to extract files from
1612 @type component: string
1613 @param component: component to extract files from
1615 @type architecture: string
1616 @param architecture: architecture to extract files from
1619 @return: apt_pkg class containing package data
1622 filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (root, suite, component, architecture)
1623 (fd, temp_file) = temp_filename()
1624 (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_file))
1626 fubar("Gunzip invocation failed!\n%s\n" % (output), result)
1627 filename = "%s/dists/%s/%s/debian-installer/binary-%s/Packages.gz" % (root, suite, component, architecture)
1628 if os.path.exists(filename):
1629 (result, output) = commands.getstatusoutput("gunzip -c %s >> %s" % (filename, temp_file))
1631 fubar("Gunzip invocation failed!\n%s\n" % (output), result)
1632 packages = open_file(temp_file)
1633 Packages = apt_pkg.ParseTagFile(packages)
1634 os.unlink(temp_file)