2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
39 import email as modemail
42 from dbconn import DBConn, get_architecture, get_component, get_suite, get_override_type, Keyring, session_wrapper
43 from dak_exceptions import *
44 from gpg import SignedFile
45 from textutils import fix_maintainer
46 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
47 re_multi_line_field, re_srchasver, re_taint_free, \
48 re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource, \
51 from formats import parse_format, validate_changes_format
52 from srcformats import get_format_from_string
53 from collections import defaultdict
55 ################################################################################
# Host-wide configuration paths and module-level caches.
default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used

alias_cache = None #: Cache for email alias checks
key_uid_email_cache = {} #: Cache for email addresses from gpg key uids

# (hashname, function, earliest_changes_version)
known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
                ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
67 # Monkeypatch commands.getstatusoutput as it may not return the correct exit
68 # code in lenny's Python. This also affects commands.getoutput and
def dak_getstatusoutput(cmd):
    """
    Run C{cmd} through the shell with stderr merged into stdout and
    collect the combined output.  Drop-in replacement for
    commands.getstatusoutput (see the exit-code note above).
    """
    pipe = subprocess.Popen(cmd, shell=True, universal_newlines=True,
        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

    output = pipe.stdout.read()

    # commands.getstatusoutput strips one trailing newline; mimic that.
    if output[-1:] == '\n':

# Install the fixed implementation over the stdlib one.
commands.getstatusoutput = dak_getstatusoutput
88 ################################################################################
91 """ Escape html chars """
92 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
94 ################################################################################
def open_file(filename, mode='r'):
    """
    Open C{file}, return fileobject.

    @type filename: string
    @param filename: path/filename to open

    @type mode: string
    @param mode: open mode

    @rtype: fileobject
    @return: open fileobject

    @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
    """
    f = open(filename, mode)
    # Error path: translate IOError into the project's CantOpenError.
    raise CantOpenError, filename
118 ################################################################################
def our_raw_input(prompt=""):
    """raw_input() wrapper: write *prompt* to stdout, handle EOF (^D) gracefully."""
    sys.stdout.write(prompt)
    # Reached when the user closes stdin with ^D.
    sys.stderr.write("\nUser interrupt (^D).\n")
136 ################################################################################
def extract_component_from_section(section):
    """Split a Section value like "contrib/net" into a (section, component) pair."""
    # An explicit "component/section" value names its component up front.
    if section.find('/') != -1:
        component = section.split('/')[0]

    # Expand default component
    if Cnf.has_key("Component::%s" % section):

    return (section, component)
153 ################################################################################
def parse_deb822(armored_contents, signing_rules=0, keyrings=None):
    """
    Parse a deb822-style (RFC822-like) control block, checking its OpenPGP
    signature first unless signing_rules == -1.  Returns a dict mapping
    lowercased field names to their values.
    """
    # Default to every active keyring known to the database.
    keyrings = [ k.keyring_name for k in DBConn().session().query(Keyring).filter(Keyring.active == True).all() ]

    require_signature = True
    if signing_rules == -1:
        require_signature = False

    signed_file = SignedFile(armored_contents, keyrings=keyrings, require_signature=require_signature)
    contents = signed_file.contents

    # Split the lines in the input, keeping the linebreaks.
    lines = contents.splitlines(True)

    raise ParseChangesError, "[Empty changes file]"

    # Reindex by line number so we can easily verify the format of
    # any multi-line fields.
    indexed_lines[index] = line[:-1]

    num_of_lines = len(indexed_lines.keys())

    while index < num_of_lines:
        line = indexed_lines[index]
        # In strict (signing_rules == 1) mode a blank line is only legal
        # at the very end of the data section.
        if line == "" and signing_rules == 1:
            if index != num_of_lines:
                raise InvalidDscError, index

        # "Field: value" on a single line.
        slf = re_single_line_field.match(line)
        field = slf.groups()[0].lower()
        changes[field] = slf.groups()[1]
        changes[field] += '\n'

        # Continuation line of a multi-line field.
        mlf = re_multi_line_field.match(line)
        raise ParseChangesError, "'%s'\n [Multi-line field continuing on from nothing?]" % (line)
        if first == 1 and changes[field] != "":
            changes[field] += '\n'
        changes[field] += mlf.groups()[0] + '\n'

    # Keep the raw text around as well (callers re-emit it verbatim).
    changes["filecontents"] = armored_contents

    if changes.has_key("source"):
        # Strip the source version in brackets from the source field,
        # put it in the "source-version" field instead.
        srcver = re_srchasver.search(changes["source"])
        changes["source"] = srcver.group(1)
        changes["source-version"] = srcver.group(2)

    raise ParseChangesError, error
227 ################################################################################
def parse_changes(filename, signing_rules=0, dsc_file=0, keyrings=None):
    """
    Parses a changes file and returns a dictionary where each field is a
    key.  The mandatory first argument is the filename of the .changes
    file.

    signing_rules is an optional argument:

      - If signing_rules == -1, no signature is required.
      - If signing_rules == 0 (the default), a signature is required.
      - If signing_rules == 1, it turns on the same strict format checking
        as dpkg-source.

    The rules for (signing_rules == 1)-mode are:

      - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
        followed by any PGP header data and must end with a blank line.

      - The data section must end with a blank line and must be followed by
        "-----BEGIN PGP SIGNATURE-----".
    """
    changes_in = open_file(filename)
    content = changes_in.read()
    # Reject files that are not valid UTF-8 before any further parsing.
    unicode(content, 'utf-8')
    raise ChangesUnicodeError, "Changes file not proper utf-8"
    changes = parse_deb822(content, signing_rules, keyrings=keyrings)

    # Finally ensure that everything needed for .changes is there
    must_keywords = ('Format', 'Date', 'Source', 'Binary', 'Architecture', 'Version',
                     'Distribution', 'Maintainer', 'Description', 'Changes', 'Files')

    for keyword in must_keywords:
        if not changes.has_key(keyword.lower()):
            missingfields.append(keyword)

    if len(missingfields):
        raise ParseChangesError, "Missing mandantory field(s) in changes file (policy 5.5): %s" % (missingfields)
276 ################################################################################
def hash_key(hashname):
    """Return the files-dict key under which the *hashname* checksum is stored."""
    return "{0}sum".format(hashname)
281 ################################################################################
def create_hash(where, files, hashname, hashfunc):
    """
    create_hash extends the passed files dict with the given hash by
    iterating over all files on disk and passing them to the hashing
    function given.

    @rtype: list
    @return: error messages for files that could not be opened
    """
    for f in files.keys():
        file_handle = open_file(f)
        except CantOpenError:
            rejmsg.append("Could not open file %s for checksumming" % (f))

        # Record the computed digest under e.g. files[f]["sha1sum"].
        files[f][hash_key(hashname)] = hashfunc(file_handle)
303 ################################################################################
def check_hash(where, files, hashname, hashfunc):
    """
    check_hash checks the given hash in the files dict against the actual
    files on disk. The hash values need to be present consistently in
    all file entries. It does not modify its input in any way.
    """
    for f in files.keys():
        file_handle = open_file(f)

        # Check for the hash entry, to not trigger a KeyError.
        if not files[f].has_key(hash_key(hashname)):
            rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,

        # Actually check the hash for correctness.
        if hashfunc(file_handle) != files[f][hash_key(hashname)]:
            rejmsg.append("%s: %s check failed in %s" % (f, hashname,
        except CantOpenError:
            # TODO: This happens when the file is in the pool.
            # warn("Cannot open file %s" % f)
338 ################################################################################
def check_size(where, files):
    """
    check_size checks the file sizes in the passed files dict against the
    sizes of the files on disk.
    """
    for f in files.keys():
        # TODO: This happens when the file is in the pool.
        actual_size = entry[stat.ST_SIZE]
        # Sizes in the files dict are stored as strings; normalise.
        size = int(files[f]["size"])
        if size != actual_size:
            rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
                          % (f, actual_size, size, where))
363 ################################################################################
def check_dsc_files(dsc_filename, dsc=None, dsc_files=None):
    """
    Verify that the files listed in the Files field of the .dsc are
    those expected given the announced Format.

    @type dsc_filename: string
    @param dsc_filename: path of .dsc file

    @type dsc: dict
    @param dsc: the content of the .dsc parsed by C{parse_changes()}

    @type dsc_files: dict
    @param dsc_files: the file list returned by C{build_file_list()}

    @rtype: list
    @return: all errors detected
    """
    # Parse the file if needed
    dsc = parse_changes(dsc_filename, signing_rules=1, dsc_file=1);

    if dsc_files is None:
        dsc_files = build_file_list(dsc, is_a_dsc=1)

    # Ensure .dsc lists proper set of source files according to the format
    # announced
    has = defaultdict(lambda: 0)

    # Suffix-regex -> 'has' flags it implies; the unescaped-dot entries
    # are legacy (format 1.0) patterns.
    (r'orig.tar.gz', ('orig_tar_gz', 'orig_tar')),
    (r'diff.gz', ('debian_diff',)),
    (r'tar.gz', ('native_tar_gz', 'native_tar')),
    (r'debian\.tar\.(gz|bz2)', ('debian_tar',)),
    (r'orig\.tar\.(gz|bz2)', ('orig_tar',)),
    (r'tar\.(gz|bz2)', ('native_tar',)),
    (r'orig-.+\.tar\.(gz|bz2)', ('more_orig_tar',)),

    for f in dsc_files.keys():
        m = re_issource.match(f)
        rejmsg.append("%s: %s in Files field not recognised as source."

        # Populate 'has' dictionary by resolving keys in lookup table
        for regex, keys in ftype_lookup:
            if re.match(regex, m.group(3)):

        # File does not match anything in lookup table; reject
        # NOTE(review): 'reject' is not defined in this module's visible
        # scope — presumably this should be rejmsg.append; confirm.
        reject("%s: unexpected source file '%s'" % (dsc_filename, f))

    # Check for multiple files
    for file_type in ('orig_tar', 'native_tar', 'debian_tar', 'debian_diff'):
        if has[file_type] > 1:
            rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))

    # Source format specific tests
    format = get_format_from_string(dsc['format'])
    '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)
    except UnknownFormatError:
        # Not an error here for now
443 ################################################################################
def check_hash_fields(what, manifest):
    """
    check_hash_fields ensures that there are no checksum fields in the
    given dict that we do not know about.
    """
    # Names of the hashes we support (see known_hashes above).
    hashes = map(lambda x: x[0], known_hashes)
    for field in manifest:
        if field.startswith("checksums-"):
            hashname = field.split("-",1)[1]
            if hashname not in hashes:
                rejmsg.append("Unsupported checksum field for %s "\
                    "in %s" % (hashname, what))
461 ################################################################################
def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
    """Ensure the given checksum type is present/verifiable for a .changes of this Format version."""
    if format >= version:
        # The version should contain the specified hash.

        # Import hashes from the changes
        rejmsg = parse_checksums(".changes", files, changes, hashname)

        # We need to calculate the hash because it can't possibly
        # be in the file otherwise.
        return func(".changes", files, hashname, hashfunc)
# We could add the orig which might be in the pool to the files dict to
# access the checksums easily.

def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
    """
    ensure_dsc_hashes' task is to ensure that each and every *present* hash
    in the dsc is correct, i.e. identical to the changes file and if necessary
    the pool. The latter task is delegated to check_hash.
    """
    # Nothing to do if the .dsc does not announce this checksum type.
    if not dsc.has_key('Checksums-%s' % (hashname,)):

    # Import hashes from the dsc
    parse_checksums(".dsc", dsc_files, dsc, hashname)

    rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
497 ################################################################################
def parse_checksums(where, files, manifest, hashname):
    """
    Parse the Checksums-<hashname> field of *manifest* and record each
    checksum on the matching entry of *files*.

    @type where: string
    @param where: name of the manifest being parsed, used in error
       messages (e.g. ".changes" or ".dsc")

    @type files: dict
    @param files: per-filename dict as built by build_file_list(); each
       listed file gains a '<hashname>sum' key (mutated in place)

    @type manifest: dict
    @param manifest: parsed control data containing the checksums field

    @type hashname: string
    @param hashname: hash name, e.g. "sha1" or "sha256"

    @rtype: list
    @return: error messages; empty when everything parsed and matched
    """
    rejmsg = []
    field = 'checksums-%s' % (hashname)
    if field not in manifest:
        return rejmsg
    for line in manifest[field].split('\n'):
        # The field value ends with a newline, so stop at the empty tail.
        if not line:
            break
        # Each entry is "<checksum> <size> <filename>".
        clist = line.strip().split(' ')
        if len(clist) == 3:
            checksum, size, checkfile = clist
        else:
            rejmsg.append("Cannot parse checksum line [%s]" % (line))
            continue
        if checkfile not in files:
            # TODO: check for the file's entry in the original files dict, not
            # the one modified by (auto)byhand and other weird stuff
            # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
            #     (file, hashname, where))
            continue
        # Sizes in the files dict are strings; compare them as-is.
        if not files[checkfile]["size"] == size:
            rejmsg.append("%s: size differs for files and checksums-%s entry "\
                "in %s" % (checkfile, hashname, where))
        files[checkfile][hash_key(hashname)] = checksum
    # Every file must have an entry in the checksums field.  Report the
    # file that is actually missing (previously this erroneously reported
    # 'checkfile', the last filename parsed from the checksums field).
    for f in files.keys():
        if hash_key(hashname) not in files[f]:
            rejmsg.append("%s: no entry in checksums-%s in %s" % (f, hashname, where))
    return rejmsg
530 ################################################################################
# Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl

def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
    """Build a per-filename dict from the Files (or Checksums-*) field of parsed control data."""
    # Make sure we have a Files: field to parse...
    if not changes.has_key(field):
        raise NoFilesFieldError

    # Validate .changes Format: field
    validate_changes_format(parse_format(changes['format']), field)

    # Only .changes "files" entries carry section/priority columns.
    includes_section = (not is_a_dsc) and field == "files"

    # Parse each entry/line:
    for i in changes[field].split('\n'):
        section = priority = ""
        # Five columns with section/priority, three without.
        (md5, size, section, priority, name) = s
        (md5, size, name) = s
        raise ParseChangesError, i

        (section, component) = extract_component_from_section(section)

        files[name] = dict(size=size, section=section,
                           priority=priority, component=component)
        files[name][hashname] = md5
574 ################################################################################
# see http://bugs.debian.org/619131
def build_package_set(dsc, session = None):
    """Build a dict describing the entries of a .dsc's Package-Set field."""
    if not dsc.has_key("package-set"):

    # Each line is "name section priority".
    for line in dsc["package-set"].split("\n"):
        (name, section, priority) = line.split()
        (section, component) = extract_component_from_section(section)

        # Pick up a "type:name" prefix (e.g. "src:foo").
        if name.find(":") != -1:
            (package_type, name) = name.split(":", 1)
            if package_type == "src":

        # Validate type if we have a session
        if session and get_override_type(package_type, session) is None:
            # Maybe just warn and ignore? exit(1) might be a bit hard...
            utils.fubar("invalid type (%s) in Package-Set." % (package_type))

        # NOTE(review): an existing "dsc" entry is overwritten here —
        # confirm the intended precedence between source and binaries.
        if package_type == "dsc":
        if not packages.has_key(name) or packages[name]["type"] == "dsc":
            packages[name] = dict(priority=priority, section=section, type=package_type, component=component, files=[])
614 ################################################################################
def send_mail (message, filename=""):
    """sendmail wrapper, takes _either_ a message string or a file as arguments"""

    # Check whether we're supposed to be sending mail
    if Cnf.has_key("Dinstall::Options::No-Mail") and Cnf["Dinstall::Options::No-Mail"]:

    # If we've been passed a string dump it into a temporary file
    (fd, filename) = tempfile.mkstemp()
    os.write (fd, message)

    # Optionally filter recipients against the configured whitelist.
    if Cnf.has_key("Dinstall::MailWhiteList") and \
       Cnf["Dinstall::MailWhiteList"] != "":
        message_in = open_file(filename)
        message_raw = modemail.message_from_file(message_in)

        whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])

        for line in whitelist_in:
            if not re_whitespace_comment.match(line):
                if re_re_mark.match(line):
                    # Marker-prefixed lines are regular expressions...
                    whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
                # ...anything else is matched literally.
                whitelist.append(re.compile(re.escape(line.strip())))

        # Keep only whitelisted addresses in the recipient headers.
        fields = ["To", "Bcc", "Cc"]
        value = message_raw.get(field, None)
        for item in value.split(","):
            (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())

        if not mail_whitelisted:
            print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])

        # Doesn't have any mail in whitelist so remove the header
        del message_raw[field]
        message_raw.replace_header(field, ', '.join(match))

        # Change message fields in order if we don't have a To header
        if not message_raw.has_key("To"):
            if message_raw.has_key(field):
                message_raw[fields[-1]] = message_raw[field]
                del message_raw[field]

        # Clean up any temporary files
        # and return, as we removed all recipients.
        os.unlink (filename);

        # Rewrite the temp file with the filtered message.
        fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0700);
        os.write (fd, message_raw.as_string(True));

    # Hand the file to the configured sendmail command.
    (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
    raise SendmailFailedError, output

    # Clean up any temporary files
700 ################################################################################
def poolify (source, component):
    """Return the pool subdirectory path for *source* within *component*."""
    # Library packages pool under their four-character "libX" prefix.
    if source[:3] == "lib":
        return component + source[:4] + '/' + source + '/'
    return component + source[:1] + '/' + source + '/'
710 ################################################################################
def move (src, dest, overwrite = 0, perms = 0664):
    """Move *src* to *dest* (directory or filename), creating directories as needed."""
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        # Create missing target directories group-writable (setgid).
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Moving %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
        fubar("Can't move %s to %s - file already exists." % (src, dest))
        if not os.access(dest, os.W_OK):
            fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
def copy (src, dest, overwrite = 0, perms = 0664):
    """Copy *src* to *dest* (directory or filename), creating directories as needed."""
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        # Create missing target directories group-writable (setgid).
        umask = os.umask(00000)
        os.makedirs(dest_dir, 02775)
    #print "Copying %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
        raise FileExistsError
        if not os.access(dest, os.W_OK):
            raise CantOverwriteError
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
757 ################################################################################
760 res = socket.getfqdn()
761 database_hostname = Cnf.get("Config::" + res + "::DatabaseHostname")
762 if database_hostname:
763 return database_hostname
def which_conf_file ():
    """Return the path of the dak configuration file to use for this host."""
    # An explicit environment override wins.
    if os.getenv('DAK_CONFIG'):
        return os.getenv('DAK_CONFIG')

    res = socket.getfqdn()
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # NOTE(review): os.path.join discards homedir when the second
        # component is absolute, so confpath is always "/etc/dak.conf" —
        # confirm whether "~/etc/dak.conf" was intended.
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            apt_pkg.ReadConfigFileISC(Cnf,default_config)

    # We are still in here, so there is no local config file or we do
    # not allow local files. Do the normal stuff.
    if Cnf.get("Config::" + res + "::DakConfig"):
        return Cnf["Config::" + res + "::DakConfig"]

    return default_config
def which_apt_conf_file ():
    """Return the path of the apt configuration file to use for this host."""
    res = socket.getfqdn()
    # In case we allow local config files per user, try if one exists
    if Cnf.FindB("Config::" + res + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # NOTE(review): os.path.join discards homedir because the second
        # component is absolute — confpath is always "/etc/dak.conf";
        # confirm intent.
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            apt_pkg.ReadConfigFileISC(Cnf,default_config)

    if Cnf.get("Config::" + res + "::AptConfig"):
        return Cnf["Config::" + res + "::AptConfig"]

    return default_apt_config
798 return default_apt_config
def which_alias_file():
    """Return this host's forward-alias file path, if one exists."""
    hostname = socket.getfqdn()
    aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
    if os.path.exists(aliasfn):
808 ################################################################################
def TemplateSubst(subst_map, filename):
    """ Perform a substitution of template """
    templatefile = open_file(filename)
    template = templatefile.read()
    # Plain-text replacement of every key in the map with its value.
    for k, v in subst_map.iteritems():
        template = template.replace(k, str(v))
819 ################################################################################
def fubar(msg, exit_code=1):
    """Report a fatal error to stderr (process exits with *exit_code*)."""
    sys.stderr.write("E: %s\n" % (msg))
826 sys.stderr.write("W: %s\n" % (msg))
828 ################################################################################
830 # Returns the user name with a laughable attempt at rfc822 conformancy
831 # (read: removing stray periods).
833 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
836 return pwd.getpwuid(os.getuid())[0]
838 ################################################################################
848 return ("%d%s" % (c, t))
850 ################################################################################
def cc_fix_changes (changes):
    """Rewrite the 'architecture' field of *changes* into a dict keyed by arch name."""
    o = changes.get("architecture", "")
    del changes["architecture"]
    changes["architecture"] = {}
    changes["architecture"][j] = 1
def changes_compare (a, b):
    """ Sort by source name, source version, 'have source', and then by filename """
    a_changes = parse_changes(a)
    b_changes = parse_changes(b)

    cc_fix_changes (a_changes)
    cc_fix_changes (b_changes)

    # Sort by source name
    a_source = a_changes.get("source")
    b_source = b_changes.get("source")
    q = cmp (a_source, b_source)

    # Sort by source version
    a_version = a_changes.get("version", "0")
    b_version = b_changes.get("version", "0")
    q = apt_pkg.VersionCompare(a_version, b_version)

    # Sort by 'have source'
    a_has_source = a_changes["architecture"].get("source")
    b_has_source = b_changes["architecture"].get("source")
    if a_has_source and not b_has_source:
    elif b_has_source and not a_has_source:

    # Fall back to sort by filename
def find_next_free (dest, too_many=100):
    """Find a free filename by appending '.<n>' to *dest*; give up after *too_many* tries."""
    while os.path.exists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
    if extra >= too_many:
        raise NoFreeFilenameError
912 ################################################################################
def result_join (original, sep = '\t'):
    """Join a result sequence with *sep*, rendering None entries as empty strings."""
    for i in xrange(len(original)):
        if original[i] == None:
            resultlist.append("")
        resultlist.append(original[i])
    return sep.join(resultlist)
923 ################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    """Prefix every line of *str* with *prefix*, optionally keeping blank lines."""
    for line in str.split('\n'):
        if line or include_blank_lines:
            out += "%s%s\n" % (prefix, line)
    # Strip trailing new line
936 ################################################################################
def validate_changes_file_arg(filename, require_changes=1):
    """
    'filename' is either a .changes or .dak file. If 'filename' is a
    .dak file, it's changed to be the corresponding .changes file. The
    function then checks if the .changes file a) exists and b) is
    readable and returns the .changes filename if so. If there's a
    problem, the next action depends on the option 'require_changes'
    argument:

      - If 'require_changes' == -1, errors are ignored and the .changes
        filename is returned.
      - If 'require_changes' == 0, a warning is given and 'None' is returned.
      - If 'require_changes' == 1, a fatal error is raised.
    """
    orig_filename = filename
    if filename.endswith(".dak"):
        filename = filename[:-4]+".changes"

    if not filename.endswith(".changes"):
        error = "invalid file type; not a changes file"

    if not os.access(filename,os.R_OK):
        if os.path.exists(filename):
            error = "permission denied"
        error = "file not found"

    if require_changes == 1:
        fubar("%s: %s." % (orig_filename, error))
    elif require_changes == 0:
        warn("Skipping %s - %s" % (orig_filename, error))
    else: # We only care about the .dak file
982 return (arch != "source" and arch != "all")
984 ################################################################################
def join_with_commas_and(list):
    """Render a sequence as an English enumeration, e.g. "a, b and c"."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    head = ", ".join(list[:-1])
    return "%s and %s" % (head, list[-1])
991 ################################################################################
996 (pkg, version, constraint) = atom
998 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
1001 pp_deps.append(pp_dep)
1002 return " |".join(pp_deps)
1004 ################################################################################
1009 ################################################################################
def parse_args(Options):
    """ Handle -a, -c and -s arguments; returns them as SQL constraints """
    # XXX: This should go away and everything which calls it be converted
    #      to use SQLA properly.  For now, we'll just fix it not to use
    #      the old Pg interface though
    session = DBConn().session()

    # Process suite
    if Options["Suite"]:
        for suitename in split_args(Options["Suite"]):
            suite = get_suite(suitename, session=session)
            if suite.suite_id is None:
                warn("suite '%s' not recognised." % (suite.suite_name))
            suite_ids_list.append(suite.suite_id)
        con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
        fubar("No valid suite given.")

    # Process component
    if Options["Component"]:
        component_ids_list = []
        for componentname in split_args(Options["Component"]):
            component = get_component(componentname, session=session)
            if component is None:
                warn("component '%s' not recognised." % (componentname))
            component_ids_list.append(component.component_id)
        if component_ids_list:
            con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
        fubar("No valid component given.")

    # Process architecture
    con_architectures = ""
    if Options["Architecture"]:
        for archname in split_args(Options["Architecture"]):
            # "source" is signalled via check_source, not an arch id.
            if archname == "source":
            arch = get_architecture(archname, session=session)
            warn("architecture '%s' not recognised." % (archname))
            arch_ids_list.append(arch.arch_id)
        con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
        if not check_source:
            fubar("No valid architecture given.")

    return (con_suites, con_architectures, con_components, check_source)
1073 ################################################################################
1075 # Inspired(tm) by Bryn Keller's print_exc_plus (See
1076 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
1079 tb = sys.exc_info()[2]
1086 frame = frame.f_back
1088 traceback.print_exc()
1090 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
1091 frame.f_code.co_filename,
1093 for key, value in frame.f_locals.items():
1094 print "\t%20s = " % key,
1098 print "<unable to print>"
1100 ################################################################################
1102 def try_with_debug(function):
1110 ################################################################################
def arch_compare_sw (a, b):
    """
    Function for use in sorting lists of architectures.

    Sorts normally except that 'source' dominates all others.
    """
    if a == "source" and b == "source":
1128 ################################################################################
def split_args (s, dwim=1):
    """
    Split command line arguments which can be separated by either commas
    or whitespace. If dwim is set, it will complain about string ending
    in comma since this usually means someone did 'dak ls -a i386, m68k
    foo' or something and the inevitable confusion resulting from 'm68k'
    being treated as an argument is undesirable.
    """
    # No comma present: split on whitespace instead.
    if s.find(",") == -1:
    if s[-1:] == "," and dwim:
        fubar("split_args: found trailing comma, spurious space maybe?")
1146 ################################################################################
def gpgv_get_status_output(cmd, status_read, status_write):
    """
    Our very own version of commands.getouputstatus(), hacked to support
    gpgv's status fd.
    """
    cmd = ['/bin/sh', '-c', cmd]
    p2cread, p2cwrite = os.pipe()
    c2pread, c2pwrite = os.pipe()
    errout, errin = os.pipe()

    # Child: close every inherited fd except gpgv's status fd, then exec.
    for i in range(3, 256):
        if i != status_write:
    os.execvp(cmd[0], cmd)

    # Parent: multiplex the child's stdout/stderr and the status fd.
    os.dup2(c2pread, c2pwrite)
    os.dup2(errout, errin)

    output = status = ""
    i, o, e = select.select([c2pwrite, errin, status_read], [], [])
    r = os.read(fd, 8196)
    more_data.append(fd)
    if fd == c2pwrite or fd == errin:
    elif fd == status_read:
    fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))

    # Reap the child and collect its exit status.
    pid, exit_status = os.waitpid(pid, 0)

    os.close(status_write)
    os.close(status_read)

    return output, status, exit_status
1213 ################################################################################
def process_gpgv_output(status):
    """Parse gpgv --status-fd output into (keyword dict, internal-error text)."""
    # Process the status-fd output
    for line in status.split('\n'):
        split = line.split()
        internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
        # A few status tokens may legitimately appear more than once.
        if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
        keywords[keyword] = args

    return (keywords, internal_error)
1240 ################################################################################
def retrieve_key (filename, keyserver=None, keyring=None):
    """
    Retrieve the key that signed 'filename' from 'keyserver' and
    add it to 'keyring'. Returns nothing on success, or an error message
    on failure.
    """
    # Defaults for keyserver and keyring
    keyserver = Cnf["Dinstall::KeyServer"]
    keyring = Cnf.ValueList("Dinstall::GPGKeyring")[0]

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(filename):
        return "%s: tainted filename" % (filename)

    # Invoke gpgv on the file
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
    (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)
    return internal_error

    # With an empty keyring, gpgv must report NO_PUBKEY for the signer.
    if not keywords.has_key("NO_PUBKEY"):
        return "didn't find expected NO_PUBKEY in gpgv status-fd output"

    fingerprint = keywords["NO_PUBKEY"][0]
    # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
    # it'll try to create a lockfile in /dev. A better solution might
    # be a tempfile or something.
    cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
          % (Cnf["Dinstall::SigningKeyring"])
    cmd += " --keyring %s --keyserver %s --recv-key %s" \
           % (keyring, keyserver, fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    return "'%s' failed with exit code %s" % (cmd, result)
1286 ################################################################################
# Build the "--keyring <path> --keyring <path> ..." argument string that
# other helpers in this module splice into gpg/gpgv command lines.
1288 def gpg_keyring_args(keyrings=None):
# Fall back to the keyrings configured in dak.conf.  NOTE(review): a line
# between 1288 and 1290 is elided here — presumably an 'if not keyrings:'
# guard so this default applies only when the caller passed none; verify
# against the full source.
1290 keyrings = Cnf.ValueList("Dinstall::GPGKeyring")
1292 return " ".join(["--keyring %s" % x for x in keyrings])
1294 ################################################################################
# Verify the (possibly detached) GPG signature on 'sig_filename' by
# running gpgv and parsing its --status-fd output.  Returns a pair
# (fingerprint, rejects); fingerprint is None whenever rejects is
# non-empty.  (Elided listing: the 'rejects = []' initialiser and
# several guard lines are not visible in this excerpt.)
1296 def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None, session=None):
1298 Check the signature of a file and return the fingerprint if the
1299 signature is valid or 'None' if it's not. The first argument is the
1300 filename whose signature should be checked. The second argument is a
1301 reject function and is called when an error is found. The reject()
1302 function must allow for two arguments: the first is the error message,
1303 the second is an optional prefix string. It's possible for reject()
1304 to be called more than once during an invocation of check_signature().
1305 The third argument is optional and is the name of the files the
1306 detached signature applies to. The fourth argument is optional and is
1307 a *list* of keyrings to use. 'autofetch' can either be None, True or
1308 False. If None, the default behaviour specified in the config will be
# Both filenames end up on a shell command line below, so refuse shell
# metacharacters up front.
1314 # Ensure the filename contains no shell meta-characters or other badness
1315 if not re_taint_free.match(sig_filename):
1316 rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
1317 return (None, rejects)
1319 if data_filename and not re_taint_free.match(data_filename):
1320 rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
1321 return (None, rejects)
# Default keyring list: every active keyring recorded in the database.
1324 keyrings = [ x.keyring_name for x in session.query(Keyring).filter(Keyring.active == True).all() ]
1326 # Autofetch the signing key if that's enabled
1327 if autofetch == None:
1328 autofetch = Cnf.get("Dinstall::KeyAutoFetch")
# Try to pull the signing key from a keyserver first (presumably guarded
# by an elided 'if autofetch:' line); any error aborts the check.
1330 error_msg = retrieve_key(sig_filename)
1332 rejects.append(error_msg)
1333 return (None, rejects)
1335 # Build the command line
1336 status_read, status_write = os.pipe()
1337 cmd = "gpgv --status-fd %s %s %s %s" % (
1338 status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)
1340 # Invoke gpgv on the file
1341 (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)
1343 # Process the status-fd output
1344 (keywords, internal_error) = process_gpgv_output(status)
1346 # If we failed to parse the status-fd output, let's just whine and bail now
# NOTE(review): list.append() takes exactly one argument — the two-
# argument append calls on 1349/1350 would raise TypeError if this branch
# is reached; they look like leftovers of the older reject(msg, prefix)
# callback API the docstring above still describes.
1348 rejects.append("internal error while performing signature check on %s." % (sig_filename))
1349 rejects.append(internal_error, "")
1350 rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.", "")
1351 return (None, rejects)
# Map well-known gpgv status tokens onto human-readable reject messages.
1353 # Now check for obviously bad things in the processed output
1354 if keywords.has_key("KEYREVOKED"):
1355 rejects.append("The key used to sign %s has been revoked." % (sig_filename))
1356 if keywords.has_key("BADSIG"):
1357 rejects.append("bad signature on %s." % (sig_filename))
1358 if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
1359 rejects.append("failed to check signature on %s." % (sig_filename))
1360 if keywords.has_key("NO_PUBKEY"):
1361 args = keywords["NO_PUBKEY"]
1364 rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
1365 if keywords.has_key("BADARMOR"):
1366 rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
1367 if keywords.has_key("NODATA"):
1368 rejects.append("no signature found in %s." % (sig_filename))
1369 if keywords.has_key("EXPKEYSIG"):
1370 args = keywords["EXPKEYSIG"]
1373 rejects.append("Signature made by expired key 0x%s" % (key))
# KEYEXPIRED without GOODSIG: report when the key expired.  The token's
# timestamp is either epoch seconds or an ISO-8601 string (contains 'T').
1374 if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
1375 args = keywords["KEYEXPIRED"]
1379 if timestamp.count("T") == 0:
1381 expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
1383 expiredate = "unknown (%s)" % (timestamp)
1385 expiredate = timestamp
1386 rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))
1388 if len(rejects) > 0:
1389 return (None, rejects)
1391 # Next check gpgv exited with a zero return code
# On non-zero exit, include gpgv's raw status-fd and stdout output to
# help diagnose the failure.
1393 rejects.append("gpgv failed while checking %s." % (sig_filename))
1395 rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "))
1397 rejects.append(prefix_multi_line_string(output, " [GPG output:] "))
1398 return (None, rejects)
# A valid signature must carry VALIDSIG (args[0] is the fingerprint),
# GOODSIG and SIG_ID tokens.
1400 # Sanity check the good stuff we expect
1401 if not keywords.has_key("VALIDSIG"):
1402 rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
1404 args = keywords["VALIDSIG"]
1406 rejects.append("internal error while checking signature on %s." % (sig_filename))
1408 fingerprint = args[0]
1409 if not keywords.has_key("GOODSIG"):
1410 rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
1411 if not keywords.has_key("SIG_ID"):
1412 rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))
# Any status token outside this whitelist is treated as an error.  The
# dict values are unused — only key membership matters.
1414 # Finally ensure there's not something we don't recognise
1415 known_keywords = dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
1416 SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
1417 NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="",POLICY_URL="")
1419 for keyword in keywords.keys():
1420 if not known_keywords.has_key(keyword):
1421 rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))
1423 if len(rejects) > 0:
1424 return (None, rejects)
1426 return (fingerprint, [])
1428 ################################################################################
# Look up the e-mail addresses on a key's uids by shelling out to gpg,
# caching results per fingerprint in module-level key_uid_email_cache.
# (Elided listing: the cache-hit return, the 'addresses = set()'-style
# initialiser and the final return are not visible here.)
1430 def gpg_get_key_addresses(fingerprint):
1431 """retreive email addresses from gpg key uids for a given fingerprint"""
# Serve from the cache when this fingerprint was looked up before.
1432 addresses = key_uid_email_cache.get(fingerprint)
1433 if addresses != None:
1436 cmd = "gpg --no-default-keyring %s --fingerprint %s" \
1437 % (gpg_keyring_args(), fingerprint)
# commands.getstatusoutput is Python 2 only (subprocess in Python 3).
1438 (result, output) = commands.getstatusoutput(cmd)
# Scan gpg's human-readable output line by line for uid lines, pulling
# the e-mail address out with re_gpg_uid.
1440 for l in output.split('\n'):
1441 m = re_gpg_uid.match(l)
1443 addresses.add(m.group(1))
1444 key_uid_email_cache[fingerprint] = addresses
1447 ################################################################################
1449 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
# Greedy word-wrap of 'paragraph' to at most 'max_length' columns, with
# each continuation line preceded by 'prefix'.  (Heavily elided listing:
# the accumulator initialisers, the word loop header and the final
# return are not visible here.)
1451 def wrap(paragraph, max_length, prefix=""):
1455 words = paragraph.split()
1458 word_size = len(word)
# A word longer than the limit is flushed onto its own line rather than
# being broken mid-word.
1459 if word_size > max_length:
1461 s += line + '\n' + prefix
1462 s += word + '\n' + prefix
# +1 accounts for the separating space before the candidate word.
1465 new_length = len(line) + word_size + 1
# Candidate word doesn't fit: emit the current line and start a new one.
1466 if new_length > max_length:
1467 s += line + '\n' + prefix
1480 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.

    Returns the fixed-up 'src' path.
    """
    # Drop the archive root prefix from both endpoints (first occurrence
    # only, so a repeated root string deeper in the path is untouched).
    stripped_target = src.replace(root, '', 1)
    link_dir = os.path.dirname(dest.replace(root, '', 1))
    # One '../' hop for every path component of the link's directory.
    hops = '../' * len(link_dir.split('/'))
    return hops + stripped_target
1493 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.

    If 'directory' is non-null, it will be the directory the file is
    pre-created in.  If 'prefix' is non-null, the filename will be
    prefixed with it (default is "dak").  If 'suffix' is non-null, the
    filename will end with it.

    Returns a pair (fd, name).
    """
    # Delegate to the stdlib: mkstemp opens the file exclusively, which
    # is what makes the returned name race-free.
    return tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=directory)
1507 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Return a secure and unique directory by pre-creating it.

    If 'parent' is non-null, it will be the directory the new directory
    is pre-created in.  If 'prefix' is non-null, the name will be
    prefixed with it (default is "dak").  If 'suffix' is non-null, the
    name will end with it.

    Returns a pathname to the new directory.
    """
    # mkdtemp creates the directory with mode 0700, readable only by
    # the owning user.
    return tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=parent)
1521 ################################################################################
1523 def is_email_alias(email):
1524 """ checks if the user part of the email is listed in the alias file """
# Lazily populate the module-level alias_cache on first use.  (Elided
# listing: the 'global alias_cache' declaration and the set initialiser
# are not visible here.)
1526 if alias_cache == None:
1527 aliasfn = which_alias_file()
# Each alias-file line looks like "name: expansion"; cache only the
# alias name before the colon.
1530 for l in open(aliasfn):
1531 alias_cache.add(l.split(':')[0])
# Compare only the local part (before '@') of the address.
1532 uid = email.split('@')[0]
1533 return uid in alias_cache
1535 ################################################################################
1537 def get_changes_files(from_dir):
1539 Takes a directory and lists all .changes files in it (as well as chdir'ing
1540 to the directory; this is due to broken behaviour on the part of p-u/p-a
1541 when you're not in the right place)
1543 Returns a list of filenames
# (Elided listing: the try/os.chdir lines implied by the docstring and
# the except clause binding 'e' are not visible here.)
1546 # Much of the rest of p-u/p-a depends on being in the right place
1548 changes_files = [x for x in os.listdir(from_dir) if x.endswith('.changes')]
# Failure to read the directory is fatal via fubar().
1550 fubar("Failed to read list from directory %s (%s)" % (from_dir, e))
1552 return changes_files
1554 ################################################################################
# Module initialisation: build the global apt_pkg configuration object
# and load the default dak.conf into it.  Loading is skipped entirely
# when DAK_TEST is set, so the test suite can run without a host config.
1558 Cnf = apt_pkg.newConfiguration()
1559 if not os.getenv("DAK_TEST"):
1560 apt_pkg.ReadConfigFileISC(Cnf,default_config)
# Also merge a per-host override config when one is in use.
1562 if which_conf_file() != default_config:
1563 apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1565 ################################################################################
1567 def parse_wnpp_bug_file(file = "/srv/ftp-master.debian.org/scripts/masterfiles/wnpp_rm"):
1569 Parses the wnpp bug list available at http://qa.debian.org/data/bts/wnpp_rm
1570 Well, actually it parsed a local copy, but let's document the source
1573 returns a dict associating source package name with a list of open wnpp
1574 bugs (Yes, there might be more than one)
# (Elided listing: the open()/try wrapper, the 'wnpp = {}' initialiser,
# the line loop header and the final return are not visible here.)
1580 lines = f.readlines()
# Python 2 print statement; a missing file is only a warning, not fatal.
1582 print "Warning: Couldn't open %s; don't know about WNPP bugs, so won't close any." % file
# Each line has the form "srcpkg: bug|bug|..."; split once on ": ",
# then split the remainder on "|".
1587 splited_line = line.split(": ", 1)
1588 if len(splited_line) > 1:
1589 wnpp[splited_line[0]] = splited_line[1].split("|")
# Reduce each entry to its leading run of digits.  Note group() with no
# argument returns the whole match, i.e. the full digit run.
1591 for source in wnpp.keys():
1593 for wnpp_bug in wnpp[source]:
1594 bug_no = re.search("(\d)+", wnpp_bug).group()
1600 ################################################################################
1602 def get_packages_from_ftp(root, suite, component, architecture):
1604 Returns an object containing apt_pkg-parseable data collected by
1605 aggregating Packages.gz files gathered for each architecture.
1608 @param root: path to ftp archive root directory
1611 @param suite: suite to extract files from
1613 @type component: string
1614 @param component: component to extract files from
1616 @type architecture: string
1617 @param architecture: architecture to extract files from
1620 @return: apt_pkg class containing package data
1623 filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (root, suite, component, architecture)
1624 (fd, temp_file) = temp_filename()
1625 (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_file))
1627 fubar("Gunzip invocation failed!\n%s\n" % (output), result)
1628 filename = "%s/dists/%s/%s/debian-installer/binary-%s/Packages.gz" % (root, suite, component, architecture)
1629 if os.path.exists(filename):
1630 (result, output) = commands.getstatusoutput("gunzip -c %s >> %s" % (filename, temp_file))
1632 fubar("Gunzip invocation failed!\n%s\n" % (output), result)
1633 packages = open_file(temp_file)
1634 Packages = apt_pkg.ParseTagFile(packages)
1635 os.unlink(temp_file)