2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
40 import email as modemail
43 from dbconn import DBConn, get_architecture, get_component, get_suite, \
44 get_override_type, Keyring, session_wrapper, \
45 get_active_keyring_paths, get_primary_keyring_path
46 from sqlalchemy import desc
47 from dak_exceptions import *
48 from gpg import SignedFile
49 from textutils import fix_maintainer
50 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
51 re_multi_line_field, re_srchasver, re_taint_free, \
52 re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource, \
55 from formats import parse_format, validate_changes_format
56 from srcformats import get_format_from_string
57 from collections import defaultdict
59 ################################################################################
61 default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
62 default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used
64 alias_cache = None #: Cache for email alias checks
65 key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
67 # (hashname, function, earliest_changes_version)
68 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
69 ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
71 # Monkeypatch commands.getstatusoutput as it may not return the correct exit
72 # code in lenny's Python. This also affects commands.getoutput and
def dak_getstatusoutput(cmd):
    """
    Replacement for commands.getstatusoutput(): run *cmd* through the
    shell and capture its output, with stderr merged into stdout.
    """
    pipe = subprocess.Popen(cmd, shell=True, universal_newlines=True,
        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

    output = pipe.stdout.read()

    # commands.getstatusoutput() strips one trailing newline; mimic that.
    if output[-1:] == '\n':

# Install our fixed implementation over the stdlib one (see comment above).
commands.getstatusoutput = dak_getstatusoutput
92 ################################################################################
95 """ Escape html chars """
96 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
98 ################################################################################
def open_file(filename, mode='r'):
    """
    Open C{file}, return fileobject.

    @type filename: string
    @param filename: path/filename to open

    @type mode: string
    @param mode: open mode

    @rtype: fileobject
    @return: open fileobject

    @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
    """
        f = open(filename, mode)
        raise CantOpenError(filename)
122 ################################################################################
124 def our_raw_input(prompt=""):
128 sys.stdout.write(prompt)
137 sys.stderr.write("\nUser interrupt (^D).\n")
140 ################################################################################
def extract_component_from_section(section, session=None):
    """
    Split a Section field such as "contrib/net" into (section, component).
    A section without a '/' is resolved against the component table.
    """
    if section.find('/') != -1:
        component = section.split('/')[0]

    # Expand default component
        comp = get_component(section, session)
            component = comp.component_name

    return (section, component)
158 ################################################################################
def parse_deb822(armored_contents, signing_rules=0, keyrings=None, session=None):
    """
    Parse RFC-822-ish (deb822) data, optionally verifying an inline GPG
    signature first.  Returns a dict of lower-cased field names; the raw
    input is kept under "filecontents".
    """
    require_signature = True
    # No keyrings supplied: accept unsigned input.
        require_signature = False

    signed_file = SignedFile(armored_contents, keyrings=keyrings, require_signature=require_signature)
    contents = signed_file.contents

    # Split the lines in the input, keeping the linebreaks.
    lines = contents.splitlines(True)
        raise ParseChangesError("[Empty changes file]")

    # Reindex by line number so we can easily verify the format of
        indexed_lines[index] = line[:-1]

    num_of_lines = len(indexed_lines.keys())
    while index < num_of_lines:
        line = indexed_lines[index]
        # In strict (.dsc) mode a blank line is only legal at the very end.
        if line == "" and signing_rules == 1:
            if index != num_of_lines:
                raise InvalidDscError(index)
        slf = re_single_line_field.match(line)
            field = slf.groups()[0].lower()
            changes[field] = slf.groups()[1]
            # A " ." continuation line adds a blank line to the field.
            changes[field] += '\n'
        mlf = re_multi_line_field.match(line)
                raise ParseChangesError("'%s'\n [Multi-line field continuing on from nothing?]" % (line))
            if first == 1 and changes[field] != "":
                changes[field] += '\n'
            changes[field] += mlf.groups()[0] + '\n'

    changes["filecontents"] = armored_contents

    if changes.has_key("source"):
        # Strip the source version in brackets from the source field,
        # put it in the "source-version" field instead.
        srcver = re_srchasver.search(changes["source"])
            changes["source"] = srcver.group(1)
            changes["source-version"] = srcver.group(2)

        raise ParseChangesError(error)
231 ################################################################################
def parse_changes(filename, signing_rules=0, dsc_file=0, keyrings=None):
    """
    Parses a changes file and returns a dictionary where each field is a
    key. The mandatory first argument is the filename of the .changes
    file.

    signing_rules is an optional argument:

      - If signing_rules == -1, no signature is required.
      - If signing_rules == 0 (the default), a signature is required.
      - If signing_rules == 1, it turns on the same strict format checking
        as dpkg-source.

    The rules for (signing_rules == 1)-mode are:

      - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
        followed by any PGP header data and must end with a blank line.

      - The data section must end with a blank line and must be followed by
        "-----BEGIN PGP SIGNATURE-----".
    """
    changes_in = open_file(filename)
    content = changes_in.read()
    # Reject files that are not valid UTF-8 before parsing.
        unicode(content, 'utf-8')
        raise ChangesUnicodeError("Changes file not proper utf-8")
    changes = parse_deb822(content, signing_rules, keyrings=keyrings)

    # Finally ensure that everything needed for .changes is there
    must_keywords = ('Format', 'Date', 'Source', 'Binary', 'Architecture', 'Version',
                     'Distribution', 'Maintainer', 'Description', 'Changes', 'Files')

    for keyword in must_keywords:
        if not changes.has_key(keyword.lower()):
            missingfields.append(keyword)

    if len(missingfields):
        # NOTE(review): "mandantory" typo is in the user-visible message;
        # left untouched here as it is runtime output.
        raise ParseChangesError("Missing mandantory field(s) in changes file (policy 5.5): %s" % (missingfields))
280 ################################################################################
def hash_key(hashname):
    """Return the files-dict key under which the *hashname* checksum lives."""
    return "{0}sum".format(hashname)
285 ################################################################################
def create_hash(where, files, hashname, hashfunc):
    """
    create_hash extends the passed files dict with the given hash by
    iterating over all files on disk and passing them to the hashing
    function given.  Unopenable files are reported as reject messages.
    """
    for f in files.keys():
            file_handle = open_file(f)
        except CantOpenError:
            rejmsg.append("Could not open file %s for checksumming" % (f))

        files[f][hash_key(hashname)] = hashfunc(file_handle)
def check_hash(where, files, hashname, hashfunc):
    """
    check_hash checks the given hash in the files dict against the actual
    files on disk. The hash values need to be present consistently in
    all file entries. It does not modify its input in any way.
    """
    for f in files.keys():
                file_handle = open_file(f)

                # Check for the hash entry, to not trigger a KeyError.
                if not files[f].has_key(hash_key(hashname)):
                    rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
                        where))

                # Actually check the hash for correctness.
                if hashfunc(file_handle) != files[f][hash_key(hashname)]:
                    rejmsg.append("%s: %s check failed in %s" % (f, hashname,
                        where))
            except CantOpenError:
                # TODO: This happens when the file is in the pool.
                # warn("Cannot open file %s" % f)
342 ################################################################################
def check_size(where, files):
    """
    check_size checks the file sizes in the passed files dict against the
    files on disk.
    """
    for f in files.keys():
        except OSError as exc:
            # TODO: This happens when the file is in the pool.

        actual_size = entry[stat.ST_SIZE]
        size = int(files[f]["size"])
        if size != actual_size:
            rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
                          % (f, actual_size, size, where))
367 ################################################################################
def check_dsc_files(dsc_filename, dsc=None, dsc_files=None):
    """
    Verify that the files listed in the Files field of the .dsc are
    those expected given the announced Format.

    @type dsc_filename: string
    @param dsc_filename: path of .dsc file

    @type dsc: dict
    @param dsc: the content of the .dsc parsed by C{parse_changes()}

    @type dsc_files: dict
    @param dsc_files: the file list returned by C{build_file_list()}

    @rtype: list
    @return: all errors detected
    """
    # Parse the file if needed
        dsc = parse_changes(dsc_filename, signing_rules=1, dsc_file=1);

    if dsc_files is None:
        dsc_files = build_file_list(dsc, is_a_dsc=1)

    # Ensure .dsc lists proper set of source files according to the format
    has = defaultdict(lambda: 0)

    # Maps filename-suffix regexps to the has[] flags they set when matched.
        (r'orig.tar.gz', ('orig_tar_gz', 'orig_tar')),
        (r'diff.gz', ('debian_diff',)),
        (r'tar.gz', ('native_tar_gz', 'native_tar')),
        (r'debian\.tar\.(gz|bz2|xz)', ('debian_tar',)),
        (r'orig\.tar\.(gz|bz2|xz)', ('orig_tar',)),
        (r'tar\.(gz|bz2|xz)', ('native_tar',)),
        (r'orig-.+\.tar\.(gz|bz2|xz)', ('more_orig_tar',)),

    for f in dsc_files.keys():
        m = re_issource.match(f)
            rejmsg.append("%s: %s in Files field not recognised as source."

        # Populate 'has' dictionary by resolving keys in lookup table
        for regex, keys in ftype_lookup:
            if re.match(regex, m.group(3)):

            # File does not match anything in lookup table; reject
            reject("%s: unexpected source file '%s'" % (dsc_filename, f))

    # Check for multiple files
    for file_type in ('orig_tar', 'native_tar', 'debian_tar', 'debian_diff'):
        if has[file_type] > 1:
            rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))

    # Source format specific tests
        format = get_format_from_string(dsc['format'])
            '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)
    except UnknownFormatError:
        # Not an error here for now
447 ################################################################################
def check_hash_fields(what, manifest):
    """
    check_hash_fields ensures that there are no checksum fields in the
    given dict that we do not know about.
    """
    hashes = map(lambda x: x[0], known_hashes)
    for field in manifest:
        if field.startswith("checksums-"):
            hashname = field.split("-",1)[1]
            if hashname not in hashes:
                rejmsg.append("Unsupported checksum field for %s "\
                    "in %s" % (hashname, what))
465 ################################################################################
def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
    """
    Ensure the .changes provides the given checksum: import it from the
    manifest when the Format is new enough, otherwise compute it from the
    files on disk.
    """
    if format >= version:
        # The version should contain the specified hash.

        # Import hashes from the changes
        rejmsg = parse_checksums(".changes", files, changes, hashname)

        # We need to calculate the hash because it can't possibly
        # be present in the manifest of this Format.
        return func(".changes", files, hashname, hashfunc)
482 # We could add the orig which might be in the pool to the files dict to
483 # access the checksums easily.
def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
    """
    ensure_dsc_hashes' task is to ensure that each and every *present* hash
    in the dsc is correct, i.e. identical to the changes file and if necessary
    the pool. The latter task is delegated to check_hash.
    """
    if not dsc.has_key('Checksums-%s' % (hashname,)):

    # Import hashes from the dsc
    parse_checksums(".dsc", dsc_files, dsc, hashname)

    rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
501 ################################################################################
def parse_checksums(where, files, manifest, hashname):
    """
    Import the Checksums-<hashname> field from *manifest* into *files*,
    validating sizes along the way; returns a list of reject messages.
    """
    field = 'checksums-%s' % hashname
    if not field in manifest:
    for line in manifest[field].split('\n'):
        clist = line.strip().split(' ')
            checksum, size, checkfile = clist
            rejmsg.append("Cannot parse checksum line [%s]" % (line))
        if not files.has_key(checkfile):
        # TODO: check for the file's entry in the original files dict, not
        # the one modified by (auto)byhand and other weird stuff
        # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
        # (file, hashname, where))
        # NOTE(review): this compares sizes with string equality -- confirm
        # files[...]["size"] is still a string (not int) at this point.
        if not files[checkfile]["size"] == size:
            rejmsg.append("%s: size differs for files and checksums-%s entry "\
                "in %s" % (checkfile, hashname, where))
        files[checkfile][hash_key(hashname)] = checksum
    for f in files.keys():
        if not files[f].has_key(hash_key(hashname)):
            rejmsg.append("%s: no entry in checksums-%s in %s" % (f, hashname, where))
533 ################################################################################
535 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
    """
    Parse the Files (or a checksums) field of a parsed .changes/.dsc into
    a dict keyed by filename.
    """
    # Make sure we have a Files: field to parse...
    if not changes.has_key(field):
        raise NoFilesFieldError

    # Validate .changes Format: field
        validate_changes_format(parse_format(changes['format']), field)

    # .changes "files" entries carry section/priority; .dsc entries do not.
    includes_section = (not is_a_dsc) and field == "files"

    # Parse each entry/line:
    for i in changes[field].split('\n'):
        section = priority = ""
                (md5, size, section, priority, name) = s
                (md5, size, name) = s
            raise ParseChangesError(i)

        (section, component) = extract_component_from_section(section)

        files[name] = dict(size=size, section=section,
                           priority=priority, component=component)
        files[name][hashname] = md5
577 ################################################################################
579 # see http://bugs.debian.org/619131
def build_package_list(dsc, session = None):
    """
    Build a dict of binary packages from the Package-List field of a .dsc
    (see the bug reference above).
    """
    if not dsc.has_key("package-list"):

    for line in dsc["package-list"].split("\n"):

        fields = line.split()
        package_type = fields[1]
        (section, component) = extract_component_from_section(fields[2])

        # Validate type if we have a session
        if session and get_override_type(package_type, session) is None:
            # Maybe just warn and ignore? exit(1) might be a bit hard...
            # NOTE(review): "utils.fubar" inside this module looks wrong --
            # plain fubar() is defined here; confirm "utils" is in scope.
            utils.fubar("invalid type (%s) in Package-List." % (package_type))

        # A binary entry wins over a previous "dsc" placeholder of same name.
        if name not in packages or packages[name]["type"] == "dsc":
            packages[name] = dict(priority=priority, section=section, type=package_type, component=component, files=[])
606 ################################################################################
def send_mail (message, filename=""):
    """sendmail wrapper, takes _either_ a message string or a file as arguments"""

    # Check whether we're supposed to be sending mail
    if Cnf.has_key("Dinstall::Options::No-Mail") and Cnf["Dinstall::Options::No-Mail"]:

    # If we've been passed a string dump it into a temporary file
        (fd, filename) = tempfile.mkstemp()
        os.write (fd, message)

    # Optionally filter recipient headers against a whitelist file.
    if Cnf.has_key("Dinstall::MailWhiteList") and \
       Cnf["Dinstall::MailWhiteList"] != "":
        message_in = open_file(filename)
        message_raw = modemail.message_from_file(message_in)

        whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
            for line in whitelist_in:
                if not re_whitespace_comment.match(line):
                    if re_re_mark.match(line):
                        # Marked lines are regexes; plain lines match literally.
                        whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
                        whitelist.append(re.compile(re.escape(line.strip())))

        # Walk each recipient header, keeping only whitelisted addresses.
        fields = ["To", "Bcc", "Cc"]
            value = message_raw.get(field, None)
                for item in value.split(","):
                    (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
                    if not mail_whitelisted:
                        print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])

                # Doesn't have any mail in whitelist so remove the header
                    del message_raw[field]
                    message_raw.replace_header(field, ', '.join(match))

        # Change message fields in order if we don't have a To header
        if not message_raw.has_key("To"):
                if message_raw.has_key(field):
                    message_raw[fields[-1]] = message_raw[field]
                    del message_raw[field]
                # Clean up any temporary files
                # and return, as we removed all recipients.
                    os.unlink (filename);

        fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0o700);
        os.write (fd, message_raw.as_string(True));

    # Invoke sendmail on the (possibly rewritten) message file.
    (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
        raise SendmailFailedError(output)

    # Clean up any temporary files
692 ################################################################################
def poolify (source, component):
    """Return the pool subdirectory path for *source* within *component*."""
    # "lib*" packages pool under a four-character prefix (e.g. libf/libfoo/).
    if source[:3] == "lib":
        return component + source[:4] + '/' + source + '/'
        return component + source[:1] + '/' + source + '/'
702 ################################################################################
def move (src, dest, overwrite = 0, perms = 0o664):
    """Move *src* to *dest* (copy + chmod), creating the target directory
    if needed; refuses to overwrite unless *overwrite* is set."""
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        # Clear the umask so makedirs gets exactly the mode we ask for.
        umask = os.umask(00000)
        os.makedirs(dest_dir, 0o2775)

    #print "Moving %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
            fubar("Can't move %s to %s - file already exists." % (src, dest))
            if not os.access(dest, os.W_OK):
                fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
def copy (src, dest, overwrite = 0, perms = 0o664):
    """Copy *src* to *dest* like move(), but leaves the source in place
    and raises exceptions instead of calling fubar()."""
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        # Clear the umask so makedirs gets exactly the mode we ask for.
        umask = os.umask(00000)
        os.makedirs(dest_dir, 0o2775)

    #print "Copying %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
            raise FileExistsError
            if not os.access(dest, os.W_OK):
                raise CantOverwriteError
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
749 ################################################################################
752 res = socket.getfqdn()
753 database_hostname = Cnf.get("Config::" + res + "::DatabaseHostname")
754 if database_hostname:
755 return database_hostname
def which_conf_file ():
    """Return the dak config file to use: $DAK_CONFIG, a per-host setting,
    or the built-in default."""
    if os.getenv('DAK_CONFIG'):
        return os.getenv('DAK_CONFIG')

    res = socket.getfqdn()
    # In case we allow local config files per user, try if one exists
    if Cnf.find_b("Config::" + res + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # NOTE(review): os.path.join() with an absolute second argument
        # discards homedir entirely -- confirm this is intended.
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            apt_pkg.ReadConfigFileISC(Cnf,confpath)

    # We are still in here, so there is no local config file or we do
    # not allow local files. Do the normal stuff.
    if Cnf.get("Config::" + res + "::DakConfig"):
        return Cnf["Config::" + res + "::DakConfig"]

    return default_config
def which_apt_conf_file ():
    """Return the apt config file to use for this host (per-host setting
    or the built-in default)."""
    res = socket.getfqdn()
    # In case we allow local config files per user, try if one exists
    if Cnf.find_b("Config::" + res + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # NOTE(review): joins with an absolute path (discards homedir) and
        # then reads default_config rather than confpath, unlike
        # which_conf_file() -- confirm both are intended.
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            apt_pkg.ReadConfigFileISC(Cnf,default_config)

    if Cnf.get("Config::" + res + "::AptConfig"):
        return Cnf["Config::" + res + "::AptConfig"]

    return default_apt_config
def which_alias_file():
    """Return the path of this host's forward-alias file, if it exists."""
    hostname = socket.getfqdn()
    aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
    if os.path.exists(aliasfn):
800 ################################################################################
def TemplateSubst(subst_map, filename):
    """Perform a substitution of template: replace each key of *subst_map*
    found in the template file with its (stringified) value."""
    templatefile = open_file(filename)
    template = templatefile.read()
    for k, v in subst_map.iteritems():
        template = template.replace(k, str(v))
811 ################################################################################
def fubar(msg, exit_code=1):
    """Print a fatal error to stderr and exit with *exit_code*."""
    sys.stderr.write("E: %s\n" % (msg))
818 sys.stderr.write("W: %s\n" % (msg))
820 ################################################################################
822 # Returns the user name with a laughable attempt at rfc822 conformancy
823 # (read: removing stray periods).
825 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
828 return pwd.getpwuid(os.getuid())[0]
830 ################################################################################
840 return ("%d%s" % (c, t))
842 ################################################################################
def cc_fix_changes (changes):
    """Convert the space-separated Architecture field into a dict keyed by
    architecture name, in place."""
    o = changes.get("architecture", "")
        del changes["architecture"]
    changes["architecture"] = {}
        changes["architecture"][j] = 1
def changes_compare (a, b):
    """ Sort by source name, source version, 'have source', and then by filename """
        a_changes = parse_changes(a)
        b_changes = parse_changes(b)

    cc_fix_changes (a_changes)
    cc_fix_changes (b_changes)

    # Sort by source name
    a_source = a_changes.get("source")
    b_source = b_changes.get("source")
    q = cmp (a_source, b_source)

    # Sort by source version
    a_version = a_changes.get("version", "0")
    b_version = b_changes.get("version", "0")
    q = apt_pkg.version_compare(a_version, b_version)

    # Sort by 'have source'
    a_has_source = a_changes["architecture"].get("source")
    b_has_source = b_changes["architecture"].get("source")
    if a_has_source and not b_has_source:
    elif b_has_source and not a_has_source:

    # Fall back to sort by filename
892 ################################################################################
def find_next_free (dest, too_many=100):
    """Return a free filename based on *dest* by appending '.N'; raise
    NoFreeFilenameError after *too_many* attempts."""
    while os.path.exists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
    if extra >= too_many:
        raise NoFreeFilenameError
904 ################################################################################
def result_join (original, sep = '\t'):
    """
    Join the elements of *original* with *sep*, rendering None elements
    as the empty string.

    @type original: sequence
    @param original: values to join (may contain None)

    @type sep: string
    @param sep: separator string

    @rtype: string
    @return: the joined result
    """
    # "is None" rather than "== None": identity test, not (overridable)
    # equality; a comprehension replaces the manual index loop.
    return sep.join([("" if i is None else i) for i in original])
915 ################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    """Prepend *prefix* to every line of *str*; blank lines are dropped
    unless include_blank_lines is set."""
    for line in str.split('\n'):
        if line or include_blank_lines:
            out += "%s%s\n" % (prefix, line)
    # Strip trailing new line
928 ################################################################################
def validate_changes_file_arg(filename, require_changes=1):
    """
    'filename' is either a .changes or .dak file. If 'filename' is a
    .dak file, it's changed to be the corresponding .changes file. The
    function then checks if the .changes file a) exists and b) is
    readable and returns the .changes filename if so. If there's a
    problem, the next action depends on the option 'require_changes'
    argument:

      - If 'require_changes' == -1, errors are ignored and the .changes
        filename is returned.
      - If 'require_changes' == 0, a warning is given and 'None' is returned.
      - If 'require_changes' == 1, a fatal error is raised.
    """
    orig_filename = filename
    if filename.endswith(".dak"):
        filename = filename[:-4]+".changes"

    if not filename.endswith(".changes"):
        error = "invalid file type; not a changes file"
        if not os.access(filename,os.R_OK):
            if os.path.exists(filename):
                error = "permission denied"
                error = "file not found"

        if require_changes == 1:
            fubar("%s: %s." % (orig_filename, error))
        elif require_changes == 0:
            warn("Skipping %s - %s" % (orig_filename, error))
        else: # We only care about the .dak file
971 ################################################################################
974 return (arch != "source" and arch != "all")
976 ################################################################################
def join_with_commas_and(list):
    """Format a sequence for human reading: 'a, b and c' (or 'nothing')."""
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    return "%s and %s" % (", ".join(list[:-1]), list[-1])
983 ################################################################################
988 (pkg, version, constraint) = atom
990 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
993 pp_deps.append(pp_dep)
994 return " |".join(pp_deps)
996 ################################################################################
1001 ################################################################################
def parse_args(Options):
    """ Handle -a, -c and -s arguments; returns them as SQL constraints """
    # XXX: This should go away and everything which calls it be converted
    # to use SQLA properly. For now, we'll just fix it not to use
    # the old Pg interface though
    session = DBConn().session()

    # Process suite
    if Options["Suite"]:
        for suitename in split_args(Options["Suite"]):
            suite = get_suite(suitename, session=session)
            if not suite or suite.suite_id is None:
                warn("suite '%s' not recognised." % (suite and suite.suite_name or suitename))
                suite_ids_list.append(suite.suite_id)
            con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
            fubar("No valid suite given.")

    # Process component
    if Options["Component"]:
        component_ids_list = []
        for componentname in split_args(Options["Component"]):
            component = get_component(componentname, session=session)
            if component is None:
                warn("component '%s' not recognised." % (componentname))
                component_ids_list.append(component.component_id)
        if component_ids_list:
            con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
            fubar("No valid component given.")

    # Process architecture
    con_architectures = ""
    # check_source flags whether "source" appeared among the architectures.
    if Options["Architecture"]:
        for archname in split_args(Options["Architecture"]):
            if archname == "source":
                arch = get_architecture(archname, session=session)
                warn("architecture '%s' not recognised." % (archname))
                arch_ids_list.append(arch.arch_id)
            con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
            if not check_source:
                fubar("No valid architecture given.")

    return (con_suites, con_architectures, con_components, check_source)
1065 ################################################################################
def arch_compare_sw (a, b):
    """
    Function for use in sorting lists of architectures.

    Sorts normally except that 'source' dominates all others.
    """
    if a == "source" and b == "source":
1083 ################################################################################
def split_args (s, dwim=1):
    """
    Split command line arguments which can be separated by either commas
    or whitespace. If dwim is set, it will complain about string ending
    in comma since this usually means someone did 'dak ls -a i386, m68k
    foo' or something and the inevitable confusion resulting from 'm68k'
    being treated as an argument is undesirable.
    """
    if s.find(",") == -1:
        if s[-1:] == "," and dwim:
            fubar("split_args: found trailing comma, spurious space maybe?")
1101 ################################################################################
def gpgv_get_status_output(cmd, status_read, status_write):
    """
    Our very own version of commands.getouputstatus(), hacked to support
    gpgv's status fd.  Forks a shell, multiplexes the child's stdout,
    stderr and the status pipe, and returns (output, status, exit_status).
    """
    cmd = ['/bin/sh', '-c', cmd]
    p2cread, p2cwrite = os.pipe()
    c2pread, c2pwrite = os.pipe()
    errout, errin = os.pipe()

    # Child: close every inherited fd except the gpgv status fd, then exec.
        for i in range(3, 256):
            if i != status_write:
            os.execvp(cmd[0], cmd)

    # Parent: collect child stdout/stderr and the status fd via select().
    os.dup2(c2pread, c2pwrite)
    os.dup2(errout, errin)

    output = status = ""
        i, o, e = select.select([c2pwrite, errin, status_read], [], [])
            r = os.read(fd, 8196)
                more_data.append(fd)
                if fd == c2pwrite or fd == errin:
                elif fd == status_read:
                    fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
            # No fd produced data: the child is done; reap it and clean up.
            pid, exit_status = os.waitpid(pid, 0)
                os.close(status_write)
                os.close(status_read)

    return output, status, exit_status
1168 ################################################################################
def process_gpgv_output(status):
    """Parse gpgv --status-fd output into ({keyword: args}, error-string)."""
    # Process the status-fd output
    for line in status.split('\n'):
        split = line.split()
            internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
        (gnupg, keyword) = split[:2]
        if gnupg != "[GNUPG:]":
            internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
        # Some tokens may legitimately repeat; flag any other duplicates.
        if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
            internal_error += "found duplicate status token ('%s').\n" % (keyword)
            keywords[keyword] = args

    return (keywords, internal_error)
1195 ################################################################################
def retrieve_key (filename, keyserver=None, keyring=None):
    """
    Retrieve the key that signed 'filename' from 'keyserver' and
    add it to 'keyring'. Returns nothing on success, or an error message
    string on failure.
    """
    # Defaults for keyserver and keyring
        keyserver = Cnf["Dinstall::KeyServer"]
        keyring = get_primary_keyring_path()

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(filename):
        return "%s: tainted filename" % (filename)

    # Invoke gpgv on the file
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
    (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)
        return internal_error

    # With an empty keyring gpgv must report NO_PUBKEY, which carries the
    # fingerprint we need to fetch.
    if not keywords.has_key("NO_PUBKEY"):
        return "didn't find expected NO_PUBKEY in gpgv status-fd output"

    fingerprint = keywords["NO_PUBKEY"][0]
    # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
    # it'll try to create a lockfile in /dev. A better solution might
    # be a tempfile or something.
    cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
          % (Cnf["Dinstall::SigningKeyring"])
    cmd += " --keyring %s --keyserver %s --recv-key %s" \
           % (keyring, keyserver, fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
        return "'%s' failed with exit code %s" % (cmd, result)
1241 ################################################################################
def gpg_keyring_args(keyrings=None):
    """Return a "--keyring X" option string for *keyrings* (defaulting to
    all active keyring paths)."""
        keyrings = get_active_keyring_paths()

    return " ".join(["--keyring %s" % x for x in keyrings])
1249 ################################################################################
def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None, session=None):
    """
    Check the signature of a file and return the fingerprint if the
    signature is valid or 'None' if it's not.

    @param sig_filename: filename whose signature should be checked
    @param data_filename: optional name of the file a detached signature
                          applies to
    @param keyrings: optional *list* of keyrings to use; defaults to the
                     active keyrings from the database
    @param autofetch: None, True or False.  If None, the default behaviour
                      specified in the config (Dinstall::KeyAutoFetch) is used
    @param session: database session used to look up active keyrings

    @rtype: tuple
    @return: (fingerprint, []) on success, or (None, [reject messages])
             when the signature could not be verified
    """

    rejects = []

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(sig_filename):
        rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
        return (None, rejects)

    if data_filename and not re_taint_free.match(data_filename):
        rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
        return (None, rejects)

    if not keyrings:
        keyrings = [ x.keyring_name for x in session.query(Keyring).filter(Keyring.active == True).all() ]

    # Autofetch the signing key if that's enabled
    if autofetch is None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    if autofetch:
        error_msg = retrieve_key(sig_filename)
        if error_msg:
            rejects.append(error_msg)
            return (None, rejects)

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    if internal_error:
        rejects.append("internal error while performing signature check on %s." % (sig_filename))
        # BUGFIX: list.append() takes exactly one argument.  The old code
        # passed a second "" (a leftover prefix argument from the former
        # reject() callback API) and raised TypeError whenever this path
        # was reached.
        rejects.append(internal_error)
        rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.")
        return (None, rejects)

    # Now check for obviously bad things in the processed output
    if "KEYREVOKED" in keywords:
        rejects.append("The key used to sign %s has been revoked." % (sig_filename))
    if "BADSIG" in keywords:
        rejects.append("bad signature on %s." % (sig_filename))
    if "ERRSIG" in keywords and "NO_PUBKEY" not in keywords:
        rejects.append("failed to check signature on %s." % (sig_filename))
    if "NO_PUBKEY" in keywords:
        args = keywords["NO_PUBKEY"]
        key = None
        if len(args) >= 1:
            key = args[0]
        rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if "BADARMOR" in keywords:
        rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if "NODATA" in keywords:
        rejects.append("no signature found in %s." % (sig_filename))
    if "EXPKEYSIG" in keywords:
        args = keywords["EXPKEYSIG"]
        key = None
        if len(args) >= 1:
            key = args[0]
        rejects.append("Signature made by expired key 0x%s" % (key))
    if "KEYEXPIRED" in keywords and "GOODSIG" not in keywords:
        args = keywords["KEYEXPIRED"]
        expiredate = ""
        if len(args) >= 1:
            timestamp = args[0]
            # gpgv emits either an epoch timestamp or an ISO8601 date
            # (containing a 'T'); format the epoch form for humans.
            if timestamp.count("T") == 0:
                try:
                    expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
                except ValueError:
                    expiredate = "unknown (%s)" % (timestamp)
            else:
                expiredate = timestamp
        rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))

    if len(rejects) > 0:
        return (None, rejects)

    # Next check gpgv exited with a zero return code
    if exit_status:
        rejects.append("gpgv failed while checking %s." % (sig_filename))
        if status.strip():
            rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "))
        else:
            rejects.append(prefix_multi_line_string(output, " [GPG output:] "))
        return (None, rejects)

    # Sanity check the good stuff we expect
    if "VALIDSIG" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
    else:
        args = keywords["VALIDSIG"]
        if len(args) < 1:
            rejects.append("internal error while checking signature on %s." % (sig_filename))
        else:
            fingerprint = args[0]
    if "GOODSIG" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if "SIG_ID" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

    # Finally ensure there's not something we don't recognise
    known_keywords = dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
                          NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="",POLICY_URL="")

    for keyword in keywords.keys():
        if keyword not in known_keywords:
            rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))

    if len(rejects) > 0:
        return (None, rejects)

    return (fingerprint, [])
1383 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """retrieve email addresses from gpg key uids for a given fingerprint"""
    # Serve from the module-level cache when we have seen this key before.
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses is not None:
        return addresses
    addresses = []
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
          % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    if result == 0:
        for l in output.split('\n'):
            m = re_gpg_uid.match(l)
            if m is not None:
                addresses.append(m.group(1))
    # Cache the result (even an empty list) so gpg is only invoked once
    # per fingerprint.
    key_uid_email_cache[fingerprint] = addresses
    return addresses
1402 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
    Returns fixed 'src'
    """
    # Strip the archive root from both endpoints (first occurrence only).
    rel_src = src.replace(root, '', 1)
    rel_dest_dir = os.path.dirname(dest.replace(root, '', 1))
    # One "../" per path component of the destination's directory climbs
    # back up to the root before descending into the (relative) source.
    ascent = '../' * len(rel_dest_dir.split('/'))
    return ascent + rel_src
1415 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.

    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pair (fd, name).
    """
    # Delegate to mkstemp, which creates the file atomically with 0600 perms.
    return tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=directory)
1429 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Return a secure and unique directory by pre-creating it.

    If 'parent' is non-null, it will be the directory the directory is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pathname to the new directory
    """
    # mkdtemp creates the directory atomically with mode 0700.
    return tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=parent)
1443 ################################################################################
def is_email_alias(email):
    """ checks if the user part of the email is listed in the alias file """
    global alias_cache
    if alias_cache is None:
        aliasfn = which_alias_file()
        alias_cache = set()
        if aliasfn:
            # Use a context manager so the alias file is closed
            # deterministically (the old code leaked the file handle).
            with open(aliasfn) as aliasfile:
                for l in aliasfile:
                    # Alias file lines look like "name: target"; the key is
                    # everything before the first colon.
                    alias_cache.add(l.split(':')[0])
    uid = email.split('@')[0]
    return uid in alias_cache
1457 ################################################################################
def get_changes_files(from_dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place)

    Returns a list of filenames
    """
    found = []
    try:
        # Much of the rest of p-u/p-a depends on being in the right place
        os.chdir(from_dir)
        for entry in os.listdir(from_dir):
            if entry.endswith('.changes'):
                found.append(entry)
    except OSError as e:
        fubar("Failed to read list from directory %s (%s)" % (from_dir, e))

    return found
1476 ################################################################################
# Initialise the global apt-style configuration object consulted throughout
# this module (Dinstall::* settings etc.).
Cnf = apt_pkg.Configuration()
if not os.getenv("DAK_TEST"):
    # DAK_TEST skips reading host configuration so the test suite can run
    # without /etc/dak/dak.conf being present.
    apt_pkg.read_config_file_isc(Cnf,default_config)

    # Layer a non-default config on top when one is selected.
    # NOTE(review): nesting of this second read under the DAK_TEST guard is
    # assumed from context -- confirm against the original file.
    if which_conf_file() != default_config:
        apt_pkg.read_config_file_isc(Cnf,which_conf_file())
1487 ################################################################################
def parse_wnpp_bug_file(file = "/srv/ftp-master.debian.org/scripts/masterfiles/wnpp_rm"):
    """
    Parses the wnpp bug list available at http://qa.debian.org/data/bts/wnpp_rm
    Well, actually it parsed a local copy, but let's document the source
    somewhere ;)

    @type file: string
    @param file: path to the local copy of the wnpp bug list

    @rtype: dict
    @return: dict associating source package name with a list of open wnpp
             bug numbers (Yes, there might be more than one)
    """
    lines = []
    try:
        # Context manager closes the file even on error (the old code
        # leaked the file handle).
        with open(file) as f:
            lines = f.readlines()
    except IOError:
        # Best-effort: a missing file just means we know about no WNPP bugs.
        print("Warning: Couldn't open %s; don't know about WNPP bugs, so won't close any." % file)

    wnpp = {}
    for line in lines:
        # Lines look like "srcpkg: BUGTYPE 123456|BUGTYPE 654321"
        splited_line = line.split(": ", 1)
        if len(splited_line) > 1:
            wnpp[splited_line[0]] = splited_line[1].split("|")

    # Reduce each entry to just the bug numbers.
    for source in wnpp.keys():
        bugs = []
        for wnpp_bug in wnpp[source]:
            # r"\d+" grabs the whole bug number; the old "(\d)+" only worked
            # because .group() returns the full match anyway.  Guard against
            # entries with no digits instead of crashing on a None match.
            match = re.search(r"\d+", wnpp_bug)
            if match:
                bugs.append(match.group())
        wnpp[source] = bugs
    return wnpp
1522 ################################################################################
def get_packages_from_ftp(root, suite, component, architecture):
    """
    Returns an object containing apt_pkg-parseable data collected by
    aggregating Packages.gz files gathered for each architecture.

    @type root: string
    @param root: path to ftp archive root directory

    @type suite: string
    @param suite: suite to extract files from

    @type component: string
    @param component: component to extract files from

    @type architecture: string
    @param architecture: architecture to extract files from

    @rtype: TagFile
    @return: apt_pkg class containing package data
    """
    filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (root, suite, component, architecture)
    (fd, temp_file) = temp_filename()
    # mkstemp's descriptor is never used below; close it immediately to
    # avoid leaking a file descriptor per call.
    os.close(fd)
    # NOTE(review): paths are interpolated into a shell command line; the
    # arguments are expected to come from trusted configuration -- verify
    # callers never pass untrusted input here.
    (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_file))
    if (result != 0):
        fubar("Gunzip invocation failed!\n%s\n" % (output), result)
    # Append the debian-installer Packages data when present.
    filename = "%s/dists/%s/%s/debian-installer/binary-%s/Packages.gz" % (root, suite, component, architecture)
    if os.path.exists(filename):
        (result, output) = commands.getstatusoutput("gunzip -c %s >> %s" % (filename, temp_file))
        if (result != 0):
            fubar("Gunzip invocation failed!\n%s\n" % (output), result)
    packages = open_file(temp_file)
    Packages = apt_pkg.ParseTagFile(packages)
    os.unlink(temp_file)
    return Packages
1560 ################################################################################
def deb_extract_control(fh):
    """Extract and return the DEBIAN/control member from a binary package."""
    deb = apt_inst.DebFile(fh)
    return deb.control.extractdata("control")