2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
39 import email as modemail
42 from dbconn import DBConn, get_architecture, get_component, get_suite, \
43 get_override_type, Keyring, session_wrapper, \
44 get_active_keyring_paths, get_primary_keyring_path
45 from sqlalchemy import desc
46 from dak_exceptions import *
47 from gpg import SignedFile
48 from textutils import fix_maintainer
49 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
50 re_multi_line_field, re_srchasver, re_taint_free, \
51 re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource, \
54 from formats import parse_format, validate_changes_format
55 from srcformats import get_format_from_string
56 from collections import defaultdict
58 ################################################################################
default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used

# Module-global caches, shared by all callers in this process.
alias_cache = None #: Cache for email alias checks
key_uid_email_cache = {} #: Cache for email addresses from gpg key uids

# (hashname, function, earliest_changes_version)
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
                ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
# Monkeypatch commands.getstatusoutput as it may not return the correct exit
# code in lenny's Python. This also affects commands.getoutput and
# commands.getstatus.
def dak_getstatusoutput(cmd):
    # Replacement for commands.getstatusoutput: run `cmd` through the shell,
    # merging stderr into stdout, and collect the whole output as a string.
    pipe = subprocess.Popen(cmd, shell=True, universal_newlines=True,
        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

    output = pipe.stdout.read()

    # Mirror commands.getstatusoutput: drop one trailing newline.
    # NOTE(review): the wait()/exit-status computation and the return
    # statement are not visible in this view — confirm against the full file.
    if output[-1:] == '\n':

# Install the fixed implementation over the stdlib one.
commands.getstatusoutput = dak_getstatusoutput
91 ################################################################################
def html_escape(s):
    """Escape HTML metacharacters in a string.

    @type s: string
    @param s: text to escape

    @rtype: string
    @return: *s* with every character matched by re_html_escaping replaced
             by its entity from the html_escaping map
    """
    # (restores the function header, which had been lost; behaviour is the
    # single substitution call below)
    return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
97 ################################################################################
def open_file(filename, mode='r'):
    Open C{file}, return fileobject.

    @type filename: string
    @param filename: path/filename to open

    @param mode: open mode

    @return: open fileobject

    @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
    # NOTE(review): the docstring delimiters and the try/except wrapper are
    # missing from this view; open() is wrapped so IOError surfaces as
    # CantOpenError and the file object is returned on success.
        f = open(filename, mode)
        raise CantOpenError, filename
121 ################################################################################
def our_raw_input(prompt=""):
    # raw_input() wrapper: emit the prompt ourselves and report EOF (^D)
    # as a user interrupt.
    # NOTE(review): the actual read and the EOFError handler lines are
    # missing from this view.
    sys.stdout.write(prompt)
    sys.stderr.write("\nUser interrupt (^D).\n")
139 ################################################################################
def extract_component_from_section(section, session=None):
    # Split a "component/section" Section value; when no '/' is present,
    # look the section up to expand the default component.
    if section.find('/') != -1:
        component = section.split('/')[0]

    # Expand default component
    comp = get_component(section, session)
    # NOTE(review): guard lines around this lookup are missing from the
    # view; presumably comp is only dereferenced when the lookup succeeded.
    component = comp.component_name

    return (section, component)
157 ################################################################################
def parse_deb822(armored_contents, signing_rules=0, keyrings=None, session=None):
    # Parse an (optionally PGP-signed) deb822 paragraph into a dict of
    # lower-cased field names -> values.
    # NOTE(review): many control-flow lines are missing from this view;
    # the body below is fragmentary.
    require_signature = True
    # (set when signing_rules == -1: signature not required)
    require_signature = False

    signed_file = SignedFile(armored_contents, keyrings=keyrings, require_signature=require_signature)
    contents = signed_file.contents

    # Split the lines in the input, keeping the linebreaks.
    lines = contents.splitlines(True)
    raise ParseChangesError, "[Empty changes file]"

    # Reindex by line number so we can easily verify the format of
    indexed_lines[index] = line[:-1]

    num_of_lines = len(indexed_lines.keys())
    while index < num_of_lines:
        line = indexed_lines[index]
        # In strict (.dsc) mode a blank line may only end the paragraph.
        if line == "" and signing_rules == 1:
            if index != num_of_lines:
                raise InvalidDscError, index
        # "Field: value" single-line fields.
        slf = re_single_line_field.match(line)
        field = slf.groups()[0].lower()
        changes[field] = slf.groups()[1]
        # A lone '.' continuation stands for an empty line in the value.
        changes[field] += '\n'
        # Continuation lines of a multi-line field.
        mlf = re_multi_line_field.match(line)
        raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
        if first == 1 and changes[field] != "":
            changes[field] += '\n'
        changes[field] += mlf.groups()[0] + '\n'

    # Keep the raw (still armored) input for later use.
    changes["filecontents"] = armored_contents

    if changes.has_key("source"):
        # Strip the source version in brackets from the source field,
        # put it in the "source-version" field instead.
        srcver = re_srchasver.search(changes["source"])
        changes["source"] = srcver.group(1)
        changes["source-version"] = srcver.group(2)

    raise ParseChangesError, error
230 ################################################################################
def parse_changes(filename, signing_rules=0, dsc_file=0, keyrings=None):
    Parses a changes file and returns a dictionary where each field is a
    key. The mandatory first argument is the filename of the .changes
    file.

    signing_rules is an optional argument:

    - If signing_rules == -1, no signature is required.
    - If signing_rules == 0 (the default), a signature is required.
    - If signing_rules == 1, it turns on the same strict format checking
      as dpkg-source.

    The rules for (signing_rules == 1)-mode are:

    - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
      followed by any PGP header data and must end with a blank line.

    - The data section must end with a blank line and must be followed by
      "-----BEGIN PGP SIGNATURE-----".
    # NOTE(review): docstring delimiters and some lines are missing from
    # this view.

    changes_in = open_file(filename)
    content = changes_in.read()
    # Reject non-UTF-8 content up front.
    unicode(content, 'utf-8')
    raise ChangesUnicodeError, "Changes file not proper utf-8"
    changes = parse_deb822(content, signing_rules, keyrings=keyrings)

    # Finally ensure that everything needed for .changes is there
    must_keywords = ('Format', 'Date', 'Source', 'Binary', 'Architecture', 'Version',
                     'Distribution', 'Maintainer', 'Description', 'Changes', 'Files')

    for keyword in must_keywords:
        if not changes.has_key(keyword.lower()):
            missingfields.append(keyword)

    if len(missingfields):
        raise ParseChangesError, "Missing mandantory field(s) in changes file (policy 5.5): %s" % (missingfields)
279 ################################################################################
def hash_key(hashname):
    """Return the files-dict key under which the named checksum is stored
    (e.g. "sha1" -> "sha1sum")."""
    return hashname + 'sum'
284 ################################################################################
def create_hash(where, files, hashname, hashfunc):
    create_hash extends the passed files dict with the given hash by
    iterating over all files on disk and passing them to the hashing
    function given.
    # NOTE(review): docstring delimiters, rejmsg init and return are
    # missing from this view.

    for f in files.keys():
            file_handle = open_file(f)
        except CantOpenError:
            rejmsg.append("Could not open file %s for checksumming" % (f))

        # Store the digest under e.g. 'sha1sum' in the file's entry.
        files[f][hash_key(hashname)] = hashfunc(file_handle)
def check_hash(where, files, hashname, hashfunc):
    check_hash checks the given hash in the files dict against the actual
    files on disk. The hash values need to be present consistently in
    all file entries. It does not modify its input in any way.
    # NOTE(review): docstring delimiters, rejmsg init, try headers and the
    # return are missing from this view.

    for f in files.keys():
        file_handle = open_file(f)

        # Check for the hash entry, to not trigger a KeyError.
        if not files[f].has_key(hash_key(hashname)):
            rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,

        # Actually check the hash for correctness.
        if hashfunc(file_handle) != files[f][hash_key(hashname)]:
            rejmsg.append("%s: %s check failed in %s" % (f, hashname,
        except CantOpenError:
            # TODO: This happens when the file is in the pool.
            # warn("Cannot open file %s" % f)
def check_size(where, files):
    check_size checks the file sizes in the passed files dict against the
    files on disk.
    # NOTE(review): docstring delimiters, the os.stat call and its error
    # handling, rejmsg init and the return are missing from this view.

    for f in files.keys():
            # TODO: This happens when the file is in the pool.
        actual_size = entry[stat.ST_SIZE]
        size = int(files[f]["size"])
        if size != actual_size:
            rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
                          % (f, actual_size, size, where))
366 ################################################################################
def check_dsc_files(dsc_filename, dsc=None, dsc_files=None):
    Verify that the files listed in the Files field of the .dsc are
    those expected given the announced Format.

    @type dsc_filename: string
    @param dsc_filename: path of .dsc file

    @param dsc: the content of the .dsc parsed by C{parse_changes()}

    @type dsc_files: dict
    @param dsc_files: the file list returned by C{build_file_list()}

    @return: all errors detected
    # NOTE(review): docstring delimiters and several statements (rejmsg
    # init, guards, the ftype_lookup assignment, returns) are missing from
    # this view.

    # Parse the file if needed
    dsc = parse_changes(dsc_filename, signing_rules=1, dsc_file=1);

    if dsc_files is None:
        dsc_files = build_file_list(dsc, is_a_dsc=1)

    # Ensure .dsc lists proper set of source files according to the format
    has = defaultdict(lambda: 0)

    # Filename-suffix regexes mapped to the 'has' keys they set.
    (r'orig.tar.gz', ('orig_tar_gz', 'orig_tar')),
    (r'diff.gz', ('debian_diff',)),
    (r'tar.gz', ('native_tar_gz', 'native_tar')),
    (r'debian\.tar\.(gz|bz2|xz)', ('debian_tar',)),
    (r'orig\.tar\.(gz|bz2|xz)', ('orig_tar',)),
    (r'tar\.(gz|bz2|xz)', ('native_tar',)),
    (r'orig-.+\.tar\.(gz|bz2|xz)', ('more_orig_tar',)),

    for f in dsc_files.keys():
        m = re_issource.match(f)
        rejmsg.append("%s: %s in Files field not recognised as source."

        # Populate 'has' dictionary by resolving keys in lookup table
        for regex, keys in ftype_lookup:
            if re.match(regex, m.group(3)):

        # File does not match anything in lookup table; reject
        reject("%s: unexpected source file '%s'" % (dsc_filename, f))

    # Check for multiple files
    for file_type in ('orig_tar', 'native_tar', 'debian_tar', 'debian_diff'):
        if has[file_type] > 1:
            rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))

    # Source format specific tests
    format = get_format_from_string(dsc['format'])
    '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)
    except UnknownFormatError:
        # Not an error here for now
446 ################################################################################
def check_hash_fields(what, manifest):
    check_hash_fields ensures that there are no checksum fields in the
    given dict that we do not know about.
    # NOTE(review): docstring delimiters, rejmsg init and the return are
    # missing from this view.

    # Names of the hashes we support, from the module-level table.
    hashes = map(lambda x: x[0], known_hashes)
    for field in manifest:
        if field.startswith("checksums-"):
            hashname = field.split("-",1)[1]
            if hashname not in hashes:
                rejmsg.append("Unsupported checksum field for %s "\
                    "in %s" % (hashname, what))
464 ################################################################################
def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
    # Ensure the .changes carries the given hash for every file: formats at
    # or above `version` must list it; for older formats it is computed.
    # NOTE(review): several branch/return lines are missing from this view.
    if format >= version:
        # The version should contain the specified hash.

        # Import hashes from the changes
        rejmsg = parse_checksums(".changes", files, changes, hashname)

        # We need to calculate the hash because it can't possibly
        return func(".changes", files, hashname, hashfunc)
481 # We could add the orig which might be in the pool to the files dict to
482 # access the checksums easily.
def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
    ensure_dsc_hashes' task is to ensure that each and every *present* hash
    in the dsc is correct, i.e. identical to the changes file and if necessary
    the pool. The latter task is delegated to check_hash.
    # NOTE(review): docstring delimiters, rejmsg init/return and the guard's
    # body are missing from this view.

    if not dsc.has_key('Checksums-%s' % (hashname,)):
    # Import hashes from the dsc
    parse_checksums(".dsc", dsc_files, dsc, hashname)
    rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
500 ################################################################################
def parse_checksums(where, files, manifest, hashname):
    """Import the Checksums-<hashname> field of `manifest` into `files`.

    @type where: string
    @param where: manifest name (".changes"/".dsc") used in error messages

    @type files: dict
    @param files: per-filename attribute dicts; extended in place with a
        '<hashname>sum' entry for every file matched by the field

    @type manifest: dict
    @param manifest: parsed control data (as returned by parse_changes())

    @type hashname: string
    @param hashname: checksum name, e.g. "sha1"

    @rtype: list of strings
    @return: error messages for malformed or inconsistent entries
    """
    rejmsg = []
    field = 'checksums-%s' % hashname
    if not field in manifest:
        return rejmsg
    for line in manifest[field].split('\n'):
        if not line:
            break
        clist = line.strip().split(' ')
        if len(clist) == 3:
            checksum, size, checkfile = clist
        else:
            rejmsg.append("Cannot parse checksum line [%s]" % (line))
            continue
        if not files.has_key(checkfile):
            # TODO: check for the file's entry in the original files dict, not
            # the one modified by (auto)byhand and other weird stuff
            # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
            #     (checkfile, hashname, where))
            continue
        # Sizes are compared as strings; both come from manifest text.
        if not files[checkfile]["size"] == size:
            rejmsg.append("%s: size differs for files and checksums-%s entry "\
                "in %s" % (checkfile, hashname, where))
            continue
        files[checkfile][hash_key(hashname)] = checksum
    # Every file must have received an entry for this hash.
    for f in files.keys():
        if not files[f].has_key(hash_key(hashname)):
            # Bug fix: report the file actually missing the entry (`f`),
            # not the last `checkfile` bound in the loop above (which may
            # even be unbound when the checksums field was empty).
            rejmsg.append("%s: no entry in checksums-%s in %s" % (f,
                hashname, where))
    return rejmsg
535 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
    # Parse the Files (or Checksums-*) field of a changes/dsc into a dict
    # keyed by filename.
    # NOTE(review): branch/guard lines and the return are missing from
    # this view.

    # Make sure we have a Files: field to parse...
    if not changes.has_key(field):
        raise NoFilesFieldError

    # Validate .changes Format: field
    validate_changes_format(parse_format(changes['format']), field)

    # .dsc file entries carry no section/priority columns.
    includes_section = (not is_a_dsc) and field == "files"

    # Parse each entry/line:
    for i in changes[field].split('\n'):
        section = priority = ""
        (md5, size, section, priority, name) = s
        (md5, size, name) = s
        raise ParseChangesError, i

        (section, component) = extract_component_from_section(section)

        files[name] = dict(size=size, section=section,
                           priority=priority, component=component)
        files[name][hashname] = md5
577 ################################################################################
579 # see http://bugs.debian.org/619131
def build_package_set(dsc, session = None):
    # Parse the Package-Set field of a .dsc (see bug #619131) into a dict
    # of per-package metadata keyed by package name.
    # NOTE(review): several guard lines and the return are missing from
    # this view.
    if not dsc.has_key("package-set"):

    for line in dsc["package-set"].split("\n"):
        (name, section, priority) = line.split()
        (section, component) = extract_component_from_section(section)

        # "type:name" entries carry an explicit package type.
        if name.find(":") != -1:
            (package_type, name) = name.split(":", 1)
            if package_type == "src":

        # Validate type if we have a session
        if session and get_override_type(package_type, session) is None:
            # Maybe just warn and ignore? exit(1) might be a bit hard...
            utils.fubar("invalid type (%s) in Package-Set." % (package_type))

        # A dsc entry may be replaced by a binary of the same name.
        if package_type == "dsc":
        if not packages.has_key(name) or packages[name]["type"] == "dsc":
            packages[name] = dict(priority=priority, section=section, type=package_type, component=component, files=[])
617 ################################################################################
def send_mail (message, filename=""):
    """sendmail wrapper, takes _either_ a message string or a file as arguments"""
    # NOTE(review): many guard/cleanup lines are missing from this view;
    # the body below is fragmentary.

    # Check whether we're supposed to be sending mail
    if Cnf.has_key("Dinstall::Options::No-Mail") and Cnf["Dinstall::Options::No-Mail"]:

    # If we've been passed a string dump it into a temporary file
    (fd, filename) = tempfile.mkstemp()
    os.write (fd, message)

    # Optionally filter the recipient headers through a whitelist file.
    if Cnf.has_key("Dinstall::MailWhiteList") and \
       Cnf["Dinstall::MailWhiteList"] != "":
        message_in = open_file(filename)
        message_raw = modemail.message_from_file(message_in)

        whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
        for line in whitelist_in:
            if not re_whitespace_comment.match(line):
                if re_re_mark.match(line):
                    # Marked lines are regexes; everything else is literal.
                    whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
                    whitelist.append(re.compile(re.escape(line.strip())))

        # Walk the recipient headers keeping only whitelisted addresses.
        fields = ["To", "Bcc", "Cc"]
            value = message_raw.get(field, None)
            for item in value.split(","):
                (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
                if not mail_whitelisted:
                    print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])

            # Doesn't have any mail in whitelist so remove the header
            del message_raw[field]
            message_raw.replace_header(field, ', '.join(match))

        # Change message fields in order if we don't have a To header
        if not message_raw.has_key("To"):
            if message_raw.has_key(field):
                message_raw[fields[-1]] = message_raw[field]
                del message_raw[field]

            # Clean up any temporary files
            # and return, as we removed all recipients.
            os.unlink (filename);

        # Rewrite the temp file with the filtered message.
        fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
        os.write (fd, message_raw.as_string(True));

    # Invoke sendmail
    (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
    raise SendmailFailedError, output

    # Clean up any temporary files
703 ################################################################################
def poolify (source, component):
    # Return the pool subdirectory path for a source package: 'lib*'
    # packages hash on the first four characters, others on the first one.
    # NOTE(review): lines normalising `component` (orig. 706-707) are not
    # visible in this view — presumably a trailing '/' is appended there.
    if source[:3] == "lib":
        return component + source[:4] + '/' + source + '/'
    return component + source[:1] + '/' + source + '/'
713 ################################################################################
def move (src, dest, overwrite = 0, perms = 0664):
    # Move `src` to `dest` (copy2 then unlink), creating the destination
    # directory group-writable (02775) if needed.
    # NOTE(review): several guard lines and the os.unlink(src) tail are
    # missing from this view.
    if os.path.exists(dest) and os.path.isdir(dest):
    dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Moving %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
        fubar("Can't move %s to %s - file already exists." % (src, dest))
        if not os.access(dest, os.W_OK):
            fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
def copy (src, dest, overwrite = 0, perms = 0664):
    # Copy `src` to `dest`, creating the destination directory
    # group-writable (02775) if needed; raises instead of fubar()-ing.
    # NOTE(review): several guard lines are missing from this view.
    if os.path.exists(dest) and os.path.isdir(dest):
    dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Copying %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
        raise FileExistsError
        if not os.access(dest, os.W_OK):
            raise CantOverwriteError
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
760 ################################################################################
# NOTE(review): tail of a config-lookup helper whose def line (orig. 762)
# is not visible in this view; it returns the per-host DatabaseHostname
# from the config when one is set.
res = socket.getfqdn()
database_hostname = Cnf.get("Config::" + res + "::DatabaseHostname")
if database_hostname:
    return database_hostname
def which_conf_file ():
    # Locate the dak config file: $DAK_CONFIG wins, then an optional
    # per-user config (when allowed for this host), then a host-specific
    # config entry, and finally the compiled-in default.
    if os.getenv('DAK_CONFIG'):
        return os.getenv('DAK_CONFIG')

    res = socket.getfqdn()
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        confpath = os.path.join(homedir, "/etc/dak.conf")
        # NOTE(review): os.path.join with an absolute second component
        # discards homedir entirely — looks like a latent bug; confirm
        # against upstream before relying on per-user configs.
        if os.path.exists(confpath):
            apt_pkg.ReadConfigFileISC(Cnf,default_config)

    # We are still in here, so there is no local config file or we do
    # not allow local files. Do the normal stuff.
    if Cnf.get("Config::" + res + "::DakConfig"):
        return Cnf["Config::" + res + "::DakConfig"]

    return default_config
def which_apt_conf_file ():
    # Locate the apt config file analogously to which_conf_file().
    res = socket.getfqdn()
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        confpath = os.path.join(homedir, "/etc/dak.conf")
        # NOTE(review): same latent os.path.join-with-absolute-path issue
        # as in which_conf_file() — homedir is discarded; confirm upstream.
        if os.path.exists(confpath):
            apt_pkg.ReadConfigFileISC(Cnf,default_config)

    if Cnf.get("Config::" + res + "::AptConfig"):
        return Cnf["Config::" + res + "::AptConfig"]

    return default_apt_config
def which_alias_file():
    # Return the per-host forward-alias file when it exists.
    # NOTE(review): the return statements are missing from this view.
    hostname = socket.getfqdn()
    aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
    if os.path.exists(aliasfn):
811 ################################################################################
def TemplateSubst(subst_map, filename):
    """ Perform a substition of template """
    # Plain textual replacement: every key of subst_map found in the
    # template file is replaced by str(value).
    # NOTE(review): the close/return lines are missing from this view.
    templatefile = open_file(filename)
    template = templatefile.read()
    for k, v in subst_map.iteritems():
        template = template.replace(k, str(v))
822 ################################################################################
def fubar(msg, exit_code=1):
    # Print a fatal error to stderr; the sys.exit(exit_code) call is not
    # visible in this view.
    sys.stderr.write("E: %s\n" % (msg))

# NOTE(review): the `def warn(msg):` header is missing from this view;
# the line below is its body (non-fatal warning to stderr).
    sys.stderr.write("W: %s\n" % (msg))
831 ################################################################################
# Returns the user name with a laughable attempt at rfc822 conformancy
# (read: removing stray periods).
# NOTE(review): the def headers of the two functions below are missing
# from this view (real-name and login-name helpers from pwd).
    return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')

    return pwd.getpwuid(os.getuid())[0]
841 ################################################################################
    # NOTE(review): tail of a size-formatting helper whose header is not
    # visible; formats count `c` with unit suffix `t`.
    return ("%d%s" % (c, t))
853 ################################################################################
def cc_fix_changes (changes):
    # Convert the space-separated Architecture field into a dict keyed by
    # architecture name (value 1) for cheap membership tests.
    # NOTE(review): the split loop header is missing from this view.
    o = changes.get("architecture", "")
    del changes["architecture"]
    changes["architecture"] = {}
    changes["architecture"][j] = 1
def changes_compare (a, b):
    """ Sort by source name, source version, 'have source', and then by filename """
    # NOTE(review): the early returns, exception fallbacks and the final
    # filename comparison are missing from this view.
    a_changes = parse_changes(a)
    b_changes = parse_changes(b)

    cc_fix_changes (a_changes)
    cc_fix_changes (b_changes)

    # Sort by source name
    a_source = a_changes.get("source")
    b_source = b_changes.get("source")
    q = cmp (a_source, b_source)

    # Sort by source version
    a_version = a_changes.get("version", "0")
    b_version = b_changes.get("version", "0")
    q = apt_pkg.VersionCompare(a_version, b_version)

    # Sort by 'have source'
    a_has_source = a_changes["architecture"].get("source")
    b_has_source = b_changes["architecture"].get("source")
    if a_has_source and not b_has_source:
    elif b_has_source and not a_has_source:

    # Fall back to sort by filename
903 ################################################################################
def find_next_free (dest, too_many=100):
    # Append '.1', '.2', ... to `dest` until an unused filename is found,
    # giving up after `too_many` attempts.
    # NOTE(review): initialisation of orig_dest/extra, the increment and
    # the return are missing from this view.
    while os.path.exists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
    if extra >= too_many:
        raise NoFreeFilenameError
915 ################################################################################
def result_join (original, sep = '\t'):
    """Join the elements of `original` into one string separated by `sep`,
    rendering None entries as the empty string.

    @type original: sequence of strings (or None)
    @param original: values to join

    @type sep: string
    @param sep: separator, tab by default

    @rtype: string
    @return: the joined representation
    """
    # `is None` instead of the original `== None`; a generator expression
    # replaces the index loop + temporary list.
    return sep.join("" if i is None else i for i in original)
926 ################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    """Prefix each (stripped) line of `str` with `prefix`.

    Blank lines are dropped unless `include_blank_lines` is true.  The
    result carries no trailing newline.

    @type str: string
    @param str: (multi-line) input text

    @type prefix: string
    @param prefix: text prepended to every emitted line

    @param include_blank_lines: keep blank lines (still prefixed) if true
    """
    # Build the lines once and join them — avoids the original quadratic
    # string += accumulation and the manual trailing-newline strip.
    out = [prefix + line.strip()
           for line in str.split('\n')
           if line.strip() or include_blank_lines]
    return '\n'.join(out)
939 ################################################################################
def validate_changes_file_arg(filename, require_changes=1):
    'filename' is either a .changes or .dak file. If 'filename' is a
    .dak file, it's changed to be the corresponding .changes file. The
    function then checks if the .changes file a) exists and b) is
    readable and returns the .changes filename if so. If there's a
    problem, the next action depends on the option 'require_changes'
    argument:

    - If 'require_changes' == -1, errors are ignored and the .changes
      filename is returned.
    - If 'require_changes' == 0, a warning is given and 'None' is returned.
    - If 'require_changes' == 1, a fatal error is raised.
    # NOTE(review): docstring delimiters and several branch/return lines
    # are missing from this view.

    orig_filename = filename
    if filename.endswith(".dak"):
        filename = filename[:-4]+".changes"

    if not filename.endswith(".changes"):
        error = "invalid file type; not a changes file"
    if not os.access(filename,os.R_OK):
        if os.path.exists(filename):
            error = "permission denied"
        error = "file not found"

    if require_changes == 1:
        fubar("%s: %s." % (orig_filename, error))
    elif require_changes == 0:
        warn("Skipping %s - %s" % (orig_filename, error))
    else: # We only care about the .dak file
982 ################################################################################
    # NOTE(review): tail of an architecture predicate whose header is not
    # visible; true only for concrete binary architectures (not "source"
    # and not "all").
    return (arch != "source" and arch != "all")
987 ################################################################################
def join_with_commas_and(list):
    """Render a list for humans: 'nothing' when empty, the lone element
    for a single entry, otherwise comma-separated with ' and ' before the
    final element."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    head, last = list[:-1], list[-1]
    return "%s and %s" % (", ".join(head), last)
994 ################################################################################
    # NOTE(review): interior of a dependency pretty-printer whose header
    # and loop lines are not visible; renders atoms as
    # "pkg (constraint version)" joined with " |".
    (pkg, version, constraint) = atom
    pp_dep = "%s (%s %s)" % (pkg, constraint, version)
    pp_deps.append(pp_dep)
    return " |".join(pp_deps)
1007 ################################################################################
1012 ################################################################################
def parse_args(Options):
    """ Handle -a, -c and -s arguments; returns them as SQL constraints """
    # XXX: This should go away and everything which calls it be converted
    # to use SQLA properly. For now, we'll just fix it not to use
    # the old Pg interface though
    # NOTE(review): several init/else lines are missing from this view.
    session = DBConn().session()

    # Process suite
    if Options["Suite"]:
        for suitename in split_args(Options["Suite"]):
            suite = get_suite(suitename, session=session)
            if suite.suite_id is None:
                warn("suite '%s' not recognised." % (suite.suite_name))
            suite_ids_list.append(suite.suite_id)
        con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
        fubar("No valid suite given.")

    # Process component
    if Options["Component"]:
        component_ids_list = []
        for componentname in split_args(Options["Component"]):
            component = get_component(componentname, session=session)
            if component is None:
                warn("component '%s' not recognised." % (componentname))
            component_ids_list.append(component.component_id)
        if component_ids_list:
            con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
        fubar("No valid component given.")

    # Process architecture
    con_architectures = ""
    if Options["Architecture"]:
        for archname in split_args(Options["Architecture"]):
            # "source" is handled via check_source, not via an arch id.
            if archname == "source":
            arch = get_architecture(archname, session=session)
            warn("architecture '%s' not recognised." % (archname))
            arch_ids_list.append(arch.arch_id)
        con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
        if not check_source:
            fubar("No valid architecture given.")

    return (con_suites, con_architectures, con_components, check_source)
1076 ################################################################################
1078 # Inspired(tm) by Bryn Keller's print_exc_plus (See
1079 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
# NOTE(review): interior of an extended-traceback printer (per the
# print_exc_plus recipe referenced above); its def line, frame-walk loop
# headers and exception guards are not visible in this view.
tb = sys.exc_info()[2]
frame = frame.f_back
traceback.print_exc()
# Dump each frame's location and local variables.
print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
                                       frame.f_code.co_filename,
for key, value in frame.f_locals.items():
    print "\t%20s = " % key,
    print "<unable to print>"
1103 ################################################################################
def try_with_debug(function):
    # Run `function`; on failure an extended traceback is dumped.
    # NOTE(review): the body is not visible in this view.
1113 ################################################################################
def arch_compare_sw (a, b):
    Function for use in sorting lists of architectures.

    Sorts normally except that 'source' dominates all others.
    # NOTE(review): docstring delimiters and the comparison returns are
    # missing from this view.

    if a == "source" and b == "source":
1131 ################################################################################
def split_args (s, dwim=1):
    Split command line arguments which can be separated by either commas
    or whitespace. If dwim is set, it will complain about string ending
    in comma since this usually means someone did 'dak ls -a i386, m68k
    foo' or something and the inevitable confusion resulting from 'm68k'
    being treated as an argument is undesirable.
    # NOTE(review): docstring delimiters and the split/return lines are
    # missing from this view.

    if s.find(",") == -1:
    if s[-1:] == "," and dwim:
        fubar("split_args: found trailing comma, spurious space maybe?")
1149 ################################################################################
def gpgv_get_status_output(cmd, status_read, status_write):
    Our very own version of commands.getouputstatus(), hacked to support
    gpgv's status fd.
    # NOTE(review): docstring delimiters, the fork and many loop/guard
    # lines are missing from this view; the body below is fragmentary.

    cmd = ['/bin/sh', '-c', cmd]
    p2cread, p2cwrite = os.pipe()
    c2pread, c2pwrite = os.pipe()
    errout, errin = os.pipe()

    # Child side: close every fd except gpgv's status fd, then exec.
    for i in range(3, 256):
        if i != status_write:
    os.execvp(cmd[0], cmd)

    os.dup2(c2pread, c2pwrite)
    os.dup2(errout, errin)

    output = status = ""
    # Multiplex stdout/stderr and the status fd until all are drained.
    i, o, e = select.select([c2pwrite, errin, status_read], [], [])
    r = os.read(fd, 8196)
    more_data.append(fd)
    if fd == c2pwrite or fd == errin:
    elif fd == status_read:
    fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))

    # Reap the child, then close our ends of the status pipe.
    pid, exit_status = os.waitpid(pid, 0)

    os.close(status_write)
    os.close(status_read)

    return output, status, exit_status
1216 ################################################################################
def process_gpgv_output(status):
    # Parse gpgv --status-fd output into a {keyword: args} dict plus a
    # string of accumulated internal-error messages (empty when clean).
    # NOTE(review): keywords/internal_error init and some guard lines are
    # missing from this view.
    # Process the status-fd output
    for line in status.split('\n'):
        split = line.split()
        internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
        # A few tokens may legitimately repeat; any other duplicate is an error.
        if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
        keywords[keyword] = args

    return (keywords, internal_error)
1243 ################################################################################
def retrieve_key (filename, keyserver=None, keyring=None):
    Retrieve the key that signed 'filename' from 'keyserver' and
    add it to 'keyring'. Returns nothing on success, or an error message
    on error.
    # NOTE(review): docstring delimiters and several guard/return lines are
    # missing from this view.

    # Defaults for keyserver and keyring
    keyserver = Cnf["Dinstall::KeyServer"]
    keyring = get_primary_keyring_path()

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(filename):
        return "%s: tainted filename" % (filename)

    # Invoke gpgv on the file
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
    (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)
    return internal_error

    # NO_PUBKEY tells us which fingerprint to fetch from the keyserver.
    if not keywords.has_key("NO_PUBKEY"):
        return "didn't find expected NO_PUBKEY in gpgv status-fd output"

    fingerprint = keywords["NO_PUBKEY"][0]
    # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
    # it'll try to create a lockfile in /dev. A better solution might
    # be a tempfile or something.
    cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
        % (Cnf["Dinstall::SigningKeyring"])
    cmd += " --keyring %s --keyserver %s --recv-key %s" \
        % (keyring, keyserver, fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    return "'%s' failed with exit code %s" % (cmd, result)
1289 ################################################################################
1291 def gpg_keyring_args(keyrings=None):
# Build the "--keyring PATH [--keyring PATH ...]" argument string for
# gpg/gpgv invocations.  NOTE(review): an elided line presumably guards the
# default with "if not keyrings:" — confirm against upstream.
1293 keyrings = get_active_keyring_paths()
1295 return " ".join(["--keyring %s" % x for x in keyrings])
1297 ################################################################################
1299 def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None, session=None):
# Verify the OpenPGP signature on 'sig_filename' (optionally detached over
# 'data_filename') and return a pair (fingerprint, rejects): the signer's
# fingerprint with an empty reject list on success, or (None, [messages]) on
# any failure.  NOTE(review): this view elides lines (original numbering
# jumps) — e.g. the "rejects = []" initialisation and several "if" guards
# are not visible; hedged notes below mark the presumed elided guards.
1301 Check the signature of a file and return the fingerprint if the
1302 signature is valid or 'None' if it's not. The first argument is the
1303 filename whose signature should be checked. The second argument is a
1304 reject function and is called when an error is found. The reject()
1305 function must allow for two arguments: the first is the error message,
1306 the second is an optional prefix string. It's possible for reject()
1307 to be called more than once during an invocation of check_signature().
1308 The third argument is optional and is the name of the files the
1309 detached signature applies to. The fourth argument is optional and is
1310 a *list* of keyrings to use. 'autofetch' can either be None, True or
1311 False. If None, the default behaviour specified in the config will be
1317 # Ensure the filename contains no shell meta-characters or other badness
# Both filenames are interpolated into a shell command below, so taint-check
# them first.
1318 if not re_taint_free.match(sig_filename):
1319 rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
1320 return (None, rejects)
1322 if data_filename and not re_taint_free.match(data_filename):
1323 rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
1324 return (None, rejects)
# Default keyrings: every active keyring known to the database.
# NOTE(review): the "if not keyrings:" guard is elided from this view.
1327 keyrings = [ x.keyring_name for x in session.query(Keyring).filter(Keyring.active == True).all() ]
1329 # Autofetch the signing key if that's enabled
1330 if autofetch == None:
1331 autofetch = Cnf.get("Dinstall::KeyAutoFetch")
# NOTE(review): an elided "if autofetch:" presumably guards the fetch.
1333 error_msg = retrieve_key(sig_filename)
1335 rejects.append(error_msg)
1336 return (None, rejects)
1338 # Build the command line
# gpgv reports machine-readable results on a dedicated status fd.
1339 status_read, status_write = os.pipe()
1340 cmd = "gpgv --status-fd %s %s %s %s" % (
1341 status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)
1343 # Invoke gpgv on the file
1344 (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)
1346 # Process the status-fd output
1347 (keywords, internal_error) = process_gpgv_output(status)
1349 # If we failed to parse the status-fd output, let's just whine and bail now
# NOTE(review): list.append() takes exactly one argument — the two-argument
# append calls below would raise TypeError if this branch runs; they look
# like leftovers from an older reject(msg, prefix) callback API.
1351 rejects.append("internal error while performing signature check on %s." % (sig_filename))
1352 rejects.append(internal_error, "")
1353 rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.", "")
1354 return (None, rejects)
1356 # Now check for obviously bad things in the processed output
# Each gpgv status keyword maps to a human-readable rejection message.
1357 if keywords.has_key("KEYREVOKED"):
1358 rejects.append("The key used to sign %s has been revoked." % (sig_filename))
1359 if keywords.has_key("BADSIG"):
1360 rejects.append("bad signature on %s." % (sig_filename))
1361 if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
1362 rejects.append("failed to check signature on %s." % (sig_filename))
1363 if keywords.has_key("NO_PUBKEY"):
1364 args = keywords["NO_PUBKEY"]
# NOTE(review): the extraction of 'key' from args (and its length check) is
# elided from this view.
1367 rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
1368 if keywords.has_key("BADARMOR"):
1369 rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
1370 if keywords.has_key("NODATA"):
1371 rejects.append("no signature found in %s." % (sig_filename))
1372 if keywords.has_key("EXPKEYSIG"):
1373 args = keywords["EXPKEYSIG"]
1376 rejects.append("Signature made by expired key 0x%s" % (key))
1377 if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
1378 args = keywords["KEYEXPIRED"]
# KEYEXPIRED carries a timestamp: either seconds-since-epoch (no "T") or an
# ISO-8601-ish literal; format the former, pass the latter through.
1382 if timestamp.count("T") == 0:
1384 expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
# NOTE(review): the try/except around strftime (yielding this fallback) is
# elided from this view.
1386 expiredate = "unknown (%s)" % (timestamp)
1388 expiredate = timestamp
1389 rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))
1391 if len(rejects) > 0:
1392 return (None, rejects)
1394 # Next check gpgv exited with a zero return code
# NOTE(review): the "if exit_status:" guard is elided from this view.
1396 rejects.append("gpgv failed while checking %s." % (sig_filename))
1398 rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "))
1400 rejects.append(prefix_multi_line_string(output, " [GPG output:] "))
1401 return (None, rejects)
1403 # Sanity check the good stuff we expect
1404 if not keywords.has_key("VALIDSIG"):
1405 rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
# VALIDSIG's first status argument is the signer's full fingerprint.
1407 args = keywords["VALIDSIG"]
1409 rejects.append("internal error while checking signature on %s." % (sig_filename))
1411 fingerprint = args[0]
1412 if not keywords.has_key("GOODSIG"):
1413 rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
1414 if not keywords.has_key("SIG_ID"):
1415 rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))
1417 # Finally ensure there's not something we don't recognise
# Whitelist of gpgv status tokens this function knows how to interpret;
# anything else is surfaced as a rejection so it gets investigated.
1418 known_keywords = dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
1419 SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
1420 NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="",POLICY_URL="")
1422 for keyword in keywords.keys():
1423 if not known_keywords.has_key(keyword):
1424 rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
1426 if len(rejects) > 0:
1427 return (None, rejects)
1429 return (fingerprint, [])
1431 ################################################################################
1433 def gpg_get_key_addresses(fingerprint):
1434 """retrieve email addresses from gpg key uids for a given fingerprint"""
# Results are memoised in the module-level key_uid_email_cache.
1435 addresses = key_uid_email_cache.get(fingerprint)
1436 if addresses != None:
# NOTE(review): the cache-hit "return addresses" and the creation of the
# fresh 'addresses' set are elided from this view (numbering jumps).
1439 cmd = "gpg --no-default-keyring %s --fingerprint %s" \
1440 % (gpg_keyring_args(), fingerprint)
1441 (result, output) = commands.getstatusoutput(cmd)
# Scan gpg's human-readable output: every line matching the uid regex
# contributes one email address (regex group 1).
1443 for l in output.split('\n'):
1444 m = re_gpg_uid.match(l)
1446 addresses.add(m.group(1))
1447 key_uid_email_cache[fingerprint] = addresses
1450 ################################################################################
1452 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
1454 def wrap(paragraph, max_length, prefix=""):
# Word-wrap 'paragraph' to at most 'max_length' columns, prefixing each
# continuation line with 'prefix'.  NOTE(review): the initialisation of the
# accumulator 's' / current 'line' and the function's tail (flushing the last
# line and returning 's') are elided from this view — confirm upstream.
1458 words = paragraph.split()
1461 word_size = len(word)
# An over-long word is flushed onto its own line rather than being split.
1462 if word_size > max_length:
1464 s += line + '\n' + prefix
1465 s += word + '\n' + prefix
# +1 accounts for the joining space before the candidate word.
1468 new_length = len(line) + word_size + 1
1469 if new_length > max_length:
1470 s += line + '\n' + prefix
1483 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
    Returns the fixed 'src' path.
    """
    # Strip the archive root from both endpoints (first occurrence only).
    rel_src = src.replace(root, '', 1)
    link_dir = os.path.dirname(dest.replace(root, '', 1))
    # One "../" hop for every directory component of the link's location.
    hops = '../' * len(link_dir.split('/'))
    return hops + rel_src
1496 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.

    If 'directory' is non-null, it will be the directory the file is
    pre-created in.  If 'prefix' is non-null, the filename will be prefixed
    with it (default is "dak").  If 'suffix' is non-null, the filename will
    end with it.

    Returns a pair (fd, name); the caller owns both the open descriptor and
    the file on disk.
    """
    # Keyword form of tempfile.mkstemp(suffix, prefix, dir).
    return tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=directory)
1510 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Return a secure and unique directory by pre-creating it.

    If 'parent' is non-null, it will be the directory the new directory is
    pre-created in.  If 'prefix' is non-null, the name will be prefixed with
    it (default is "dak").  If 'suffix' is non-null, the name will end with
    it.

    Returns the pathname of the new directory; the caller owns it.
    """
    # Keyword form of tempfile.mkdtemp(suffix, prefix, dir).
    return tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=parent)
1524 ################################################################################
1526 def is_email_alias(email):
1527 """ checks if the user part of the email is listed in the alias file """
# Lazily populate the module-level alias_cache on first use.
# NOTE(review): the "global alias_cache" statement and the set()
# initialisation are elided from this view (numbering jumps).
1529 if alias_cache == None:
1530 aliasfn = which_alias_file()
# Each alias-file line is "name: target"; only the name part is cached.
1533 for l in open(aliasfn):
1534 alias_cache.add(l.split(':')[0])
# Compare only the local part (before '@') against the cached alias names.
1535 uid = email.split('@')[0]
1536 return uid in alias_cache
1538 ################################################################################
1540 def get_changes_files(from_dir):
1542 Takes a directory and lists all .changes files in it (as well as chdir'ing
1543 to the directory; this is due to broken behaviour on the part of p-u/p-a
1544 when you're not in the right place)
1546 Returns a list of filenames
1549 # Much of the rest of p-u/p-a depends on being in the right place
# NOTE(review): the os.chdir(from_dir) call and the try/except wrapping the
# listing are elided from this view; 'e' below is presumably the caught
# OSError from that elided except clause.
1551 changes_files = [x for x in os.listdir(from_dir) if x.endswith('.changes')]
# fubar() aborts the program with the given error message.
1553 fubar("Failed to read list from directory %s (%s)" % (from_dir, e))
1555 return changes_files
1557 ################################################################################
# Module initialisation: load dak's apt-style configuration into the global
# 'Cnf'.  Reading the system default config is skipped when DAK_TEST is set,
# so the test suite can run without /etc/dak/dak.conf present.
1561 Cnf = apt_pkg.newConfiguration()
1562 if not os.getenv("DAK_TEST"):
1563 apt_pkg.ReadConfigFileISC(Cnf,default_config)
# Also merge a host-specific config file when it differs from the default.
1565 if which_conf_file() != default_config:
1566 apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1568 ################################################################################
1570 def parse_wnpp_bug_file(file = "/srv/ftp-master.debian.org/scripts/masterfiles/wnpp_rm"):
1572 Parses the wnpp bug list available at http://qa.debian.org/data/bts/wnpp_rm
1573 Well, actually it parsed a local copy, but let's document the source
1576 returns a dict associating source package name with a list of open wnpp
1577 bugs (Yes, there might be more than one)
# NOTE(review): the try/except around opening 'file', the "wnpp = {}"
# initialisation, the per-line loop header and the final return are elided
# from this view (numbering jumps).
1583 lines = f.readlines()
# Best-effort: an unreadable file only produces a warning, not a failure.
1585 print "Warning: Couldn't open %s; don't know about WNPP bugs, so won't close any." % file
# Each line is "srcpkg: bug|bug|..."; build the package -> bug-list mapping.
1590 splited_line = line.split(": ", 1)
1591 if len(splited_line) > 1:
1592 wnpp[splited_line[0]] = splited_line[1].split("|")
# Second pass: reduce each entry to just the numeric bug ids.
1594 for source in wnpp.keys():
1596 for wnpp_bug in wnpp[source]:
# NOTE(review): the pattern should be a raw string, and "(\d)+" captures only
# the last digit — but .group() returns the whole matched digit run, so the
# result is still the full bug number.
1597 bug_no = re.search("(\d)+", wnpp_bug).group()
1603 ################################################################################
1605 def get_packages_from_ftp(root, suite, component, architecture):
1607 Returns an object containing apt_pkg-parseable data collected by
1608 aggregating Packages.gz files gathered for each architecture.
1611 @param root: path to ftp archive root directory
1614 @param suite: suite to extract files from
1616 @type component: string
1617 @param component: component to extract files from
1619 @type architecture: string
1620 @param architecture: architecture to extract files from
1623 @return: apt_pkg class containing package data
1626 filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (root, suite, component, architecture)
1627 (fd, temp_file) = temp_filename()
1628 (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_file))
1630 fubar("Gunzip invocation failed!\n%s\n" % (output), result)
1631 filename = "%s/dists/%s/%s/debian-installer/binary-%s/Packages.gz" % (root, suite, component, architecture)
1632 if os.path.exists(filename):
1633 (result, output) = commands.getstatusoutput("gunzip -c %s >> %s" % (filename, temp_file))
1635 fubar("Gunzip invocation failed!\n%s\n" % (output), result)
1636 packages = open_file(temp_file)
1637 Packages = apt_pkg.ParseTagFile(packages)
1638 os.unlink(temp_file)