2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
39 import email as modemail
42 from dbconn import DBConn, get_architecture, get_component, get_suite, \
43 get_override_type, Keyring, session_wrapper, \
44 get_active_keyring_paths, get_primary_keyring_path
45 from sqlalchemy import desc
46 from dak_exceptions import *
47 from gpg import SignedFile
48 from textutils import fix_maintainer
49 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
50 re_multi_line_field, re_srchasver, re_taint_free, \
51 re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource, \
54 from formats import parse_format, validate_changes_format
55 from srcformats import get_format_from_string
56 from collections import defaultdict
58 ################################################################################
# --- Module-level defaults and caches -------------------------------------
# NOTE(review): this view of the file omits some original lines; comments
# below describe only what is visible.
60 default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
61 default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used
# Process-wide caches shared by all callers in this module.
63 alias_cache = None #: Cache for email alias checks
64 key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
# Each entry: (hashname, apt_pkg hashing function, earliest .changes format
# version expected to carry that checksum field).
66 # (hashname, function, earliest_changes_version)
67 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
68 ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
70 # Monkeypatch commands.getstatusoutput as it may not return the correct exit
71 # code in lenny's Python. This also affects commands.getoutput and
# dak_getstatusoutput(cmd): replacement for commands.getstatusoutput that
# runs `cmd` through the shell and merges stderr into stdout.
# NOTE(review): several body lines (exit-status handling, trailing-newline
# strip, return) are not visible in this view — do not infer them.
73 def dak_getstatusoutput(cmd):
74 pipe = subprocess.Popen(cmd, shell=True, universal_newlines=True,
75 stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
77 output = pipe.stdout.read()
# Presumably strips a single trailing newline from output — TODO confirm.
81 if output[-1:] == '\n':
# Install the replacement globally so all users of commands.getstatusoutput
# in this process get the fixed behaviour.
89 commands.getstatusoutput = dak_getstatusoutput
91 ################################################################################
# Body of an HTML-escaping helper (its `def` line is not visible in this
# view). Replaces each character matched by re_html_escaping with its
# mapping from the html_escaping table.
94 """ Escape html chars """
95 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
97 ################################################################################
# open_file(filename, mode='r'): thin wrapper around open() that converts an
# IOError into the project's CantOpenError (per the docstring below).
# NOTE(review): the try/except wrapper lines are not visible in this view.
99 def open_file(filename, mode='r'):
101 Open C{file}, return fileobject.
103 @type filename: string
104 @param filename: path/filename to open
107 @param mode: open mode
110 @return: open fileobject
112 @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
116 f = open(filename, mode)
118 raise CantOpenError(filename)
121 ################################################################################
# our_raw_input(prompt=""): prompt on stdout and read a line from the user;
# the visible branch reports a ^D user interrupt on stderr.
# NOTE(review): the actual input call and return are not visible here.
123 def our_raw_input(prompt=""):
127 sys.stdout.write(prompt)
136 sys.stderr.write("\nUser interrupt (^D).\n")
139 ################################################################################
# extract_component_from_section(section, session=None):
# Split a Debian "Section" value of the form "component/section" into its
# parts; otherwise look the section up as a component via get_component().
# Returns a (section, component) tuple.
# NOTE(review): default/fallback branches are not visible in this view.
141 def extract_component_from_section(section, session=None):
144 if section.find('/') != -1:
145 component = section.split('/')[0]
147 # Expand default component
149 comp = get_component(section, session)
153 component = comp.component_name
155 return (section, component)
157 ################################################################################
# parse_deb822(armored_contents, signing_rules=0, keyrings=None, session=None):
# Parse a (possibly PGP-armored) deb822-style document into a dict of
# lower-cased field names -> values. Signature checking is delegated to
# SignedFile; signing_rules == -1 appears to disable the signature
# requirement (the branch condition itself is not visible).
# NOTE(review): many interior lines of this parser are missing from this
# view; comments describe only visible statements.
159 def parse_deb822(armored_contents, signing_rules=0, keyrings=None, session=None):
160 require_signature = True
163 require_signature = False
165 signed_file = SignedFile(armored_contents, keyrings=keyrings, require_signature=require_signature)
166 contents = signed_file.contents
171 # Split the lines in the input, keeping the linebreaks.
172 lines = contents.splitlines(True)
175 raise ParseChangesError("[Empty changes file]")
177 # Reindex by line number so we can easily verify the format of
# Stored without the trailing newline (line[:-1]).
183 indexed_lines[index] = line[:-1]
185 num_of_lines = len(indexed_lines.keys())
# Main parse loop over the indexed lines.
188 while index < num_of_lines:
190 line = indexed_lines[index]
# Under strict .dsc rules (signing_rules == 1) a blank line before the
# end of input is an error.
191 if line == "" and signing_rules == 1:
192 if index != num_of_lines:
193 raise InvalidDscError(index)
# Single-line "Field: value" match.
195 slf = re_single_line_field.match(line)
197 field = slf.groups()[0].lower()
198 changes[field] = slf.groups()[1]
202 changes[field] += '\n'
# Continuation line of a multi-line field.
204 mlf = re_multi_line_field.match(line)
207 raise ParseChangesError("'%s'\n [Multi-line field continuing on from nothing?]" % (line))
208 if first == 1 and changes[field] != "":
209 changes[field] += '\n'
211 changes[field] += mlf.groups()[0] + '\n'
# Keep the raw (armored) input around for later re-use.
215 changes["filecontents"] = armored_contents
217 if changes.has_key("source"):
218 # Strip the source version in brackets from the source field,
219 # put it in the "source-version" field instead.
220 srcver = re_srchasver.search(changes["source"])
222 changes["source"] = srcver.group(1)
223 changes["source-version"] = srcver.group(2)
226 raise ParseChangesError(error)
230 ################################################################################
# parse_changes(filename, ...): read a .changes file, validate it is UTF-8,
# parse it via parse_deb822(), then check that all mandatory fields
# (Debian Policy 5.5) are present.
# NOTE(review): "mandantory" in the error message below is a typo in a
# runtime string; left as-is here since this edit changes comments only.
232 def parse_changes(filename, signing_rules=0, dsc_file=0, keyrings=None):
234 Parses a changes file and returns a dictionary where each field is a
235 key. The mandatory first argument is the filename of the .changes
238 signing_rules is an optional argument:
240 - If signing_rules == -1, no signature is required.
241 - If signing_rules == 0 (the default), a signature is required.
242 - If signing_rules == 1, it turns on the same strict format checking
245 The rules for (signing_rules == 1)-mode are:
247 - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
248 followed by any PGP header data and must end with a blank line.
250 - The data section must end with a blank line and must be followed by
251 "-----BEGIN PGP SIGNATURE-----".
254 changes_in = open_file(filename)
255 content = changes_in.read()
# Raises if the content is not valid UTF-8 (Python 2 unicode()).
258 unicode(content, 'utf-8')
260 raise ChangesUnicodeError("Changes file not proper utf-8")
261 changes = parse_deb822(content, signing_rules, keyrings=keyrings)
265 # Finally ensure that everything needed for .changes is there
266 must_keywords = ('Format', 'Date', 'Source', 'Binary', 'Architecture', 'Version',
267 'Distribution', 'Maintainer', 'Description', 'Changes', 'Files')
270 for keyword in must_keywords:
271 if not changes.has_key(keyword.lower()):
272 missingfields.append(keyword)
274 if len(missingfields):
275 raise ParseChangesError("Missing mandantory field(s) in changes file (policy 5.5): %s" % (missingfields))
279 ################################################################################
def hash_key(hashname):
    """Return the files-dict key under which the given hash is stored.

    E.g. "sha1" -> "sha1sum", matching the md5sum/sha1sum/sha256sum
    naming convention used throughout the files dictionaries.
    """
    return "{0}sum".format(hashname)
284 ################################################################################
# create_hash(where, files, hashname, hashfunc): for every file listed in
# the files dict, open it and store hashfunc(file_handle) under the
# hash_key(hashname) entry; unreadable files are collected as reject
# messages instead of raising.
# NOTE(review): rejmsg initialisation, try/close lines and the return are
# not visible in this view.
286 def create_hash(where, files, hashname, hashfunc):
288 create_hash extends the passed files dict with the given hash by
289 iterating over all files on disk and passing them to the hashing
294 for f in files.keys():
296 file_handle = open_file(f)
297 except CantOpenError:
298 rejmsg.append("Could not open file %s for checksumming" % (f))
301 files[f][hash_key(hashname)] = hashfunc(file_handle)
306 ################################################################################
# check_hash(where, files, hashname, hashfunc): verify the recorded
# checksum of each file in the files dict against the file on disk.
# Missing entries and mismatches are appended to a reject-message list;
# the input dict is not modified.
# NOTE(review): the rejmsg setup, some message arguments and the return
# are not visible in this view.
308 def check_hash(where, files, hashname, hashfunc):
310 check_hash checks the given hash in the files dict against the actual
311 files on disk. The hash values need to be present consistently in
312 all file entries. It does not modify its input in any way.
316 for f in files.keys():
320 file_handle = open_file(f)
322 # Check for the hash entry, to not trigger a KeyError.
323 if not files[f].has_key(hash_key(hashname)):
324 rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
328 # Actually check the hash for correctness.
329 if hashfunc(file_handle) != files[f][hash_key(hashname)]:
330 rejmsg.append("%s: %s check failed in %s" % (f, hashname,
# A CantOpenError is deliberately tolerated here (file may live in the
# pool rather than the queue directory — see comment below).
332 except CantOpenError:
333 # TODO: This happens when the file is in the pool.
334 # warn("Cannot open file %s" % f)
341 ################################################################################
# check_size(where, files): compare each file's on-disk size (via os.stat,
# not visible here) against the "size" recorded in the files dict and
# collect mismatches as reject messages.
# NOTE(review): the stat call, rejmsg setup and return are not visible.
343 def check_size(where, files):
345 check_size checks the file sizes in the passed files dict against the
350 for f in files.keys():
# Missing files are tolerated (pool case), mirroring check_hash above.
353 except OSError as exc:
355 # TODO: This happens when the file is in the pool.
359 actual_size = entry[stat.ST_SIZE]
360 size = int(files[f]["size"])
361 if size != actual_size:
362 rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
363 % (f, actual_size, size, where))
366 ################################################################################
# check_dsc_files(dsc_filename, dsc=None, dsc_files=None):
# Validate that the file list of a .dsc matches what its announced source
# Format allows (e.g. exactly one orig tarball). Unparsed inputs are
# parsed on demand via parse_changes()/build_file_list().
# NOTE(review): several lines (rejmsg setup, the 'has' population loop
# body, return) are not visible in this view.
368 def check_dsc_files(dsc_filename, dsc=None, dsc_files=None):
370 Verify that the files listed in the Files field of the .dsc are
371 those expected given the announced Format.
373 @type dsc_filename: string
374 @param dsc_filename: path of .dsc file
377 @param dsc: the content of the .dsc parsed by C{parse_changes()}
379 @type dsc_files: dict
380 @param dsc_files: the file list returned by C{build_file_list()}
383 @return: all errors detected
387 # Parse the file if needed
389 dsc = parse_changes(dsc_filename, signing_rules=1, dsc_file=1);
391 if dsc_files is None:
392 dsc_files = build_file_list(dsc, is_a_dsc=1)
394 # Ensure .dsc lists proper set of source files according to the format
# 'has' counts how many files of each category were seen (defaults to 0).
396 has = defaultdict(lambda: 0)
# Lookup table: filename-suffix regex -> category keys to bump in 'has'.
# NOTE(review): the first three patterns use unescaped '.', so they also
# match e.g. "origXtar.gz" — presumably historical; left untouched.
399 (r'orig.tar.gz', ('orig_tar_gz', 'orig_tar')),
400 (r'diff.gz', ('debian_diff',)),
401 (r'tar.gz', ('native_tar_gz', 'native_tar')),
402 (r'debian\.tar\.(gz|bz2|xz)', ('debian_tar',)),
403 (r'orig\.tar\.(gz|bz2|xz)', ('orig_tar',)),
404 (r'tar\.(gz|bz2|xz)', ('native_tar',)),
405 (r'orig-.+\.tar\.(gz|bz2|xz)', ('more_orig_tar',)),
408 for f in dsc_files.keys():
409 m = re_issource.match(f)
411 rejmsg.append("%s: %s in Files field not recognised as source."
415 # Populate 'has' dictionary by resolving keys in lookup table
417 for regex, keys in ftype_lookup:
418 if re.match(regex, m.group(3)):
424 # File does not match anything in lookup table; reject
426 reject("%s: unexpected source file '%s'" % (dsc_filename, f))
428 # Check for multiple files
429 for file_type in ('orig_tar', 'native_tar', 'debian_tar', 'debian_diff'):
430 if has[file_type] > 1:
431 rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))
433 # Source format specific tests
# Delegate format-specific validation to the srcformats class.
435 format = get_format_from_string(dsc['format'])
437 '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)
440 except UnknownFormatError:
441 # Not an error here for now
446 ################################################################################
# check_hash_fields(what, manifest): reject any "checksums-*" field in the
# manifest whose hash name is not in the module-level known_hashes table.
# NOTE(review): rejmsg initialisation and the return are not visible here.
448 def check_hash_fields(what, manifest):
450 check_hash_fields ensures that there are no checksum fields in the
451 given dict that we do not know about.
# Extract just the hash names ("sha1", "sha256", ...) from known_hashes.
455 hashes = map(lambda x: x[0], known_hashes)
456 for field in manifest:
457 if field.startswith("checksums-"):
458 hashname = field.split("-",1)[1]
459 if hashname not in hashes:
460 rejmsg.append("Unsupported checksum field for %s "\
461 "in %s" % (hashname, what))
464 ################################################################################
# _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
# If the .changes Format version is new enough to carry the given hash,
# import it from the changes via parse_checksums(); otherwise fall back to
# computing/checking it (the fallback's 'func' selection is not visible).
466 def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
467 if format >= version:
468 # The version should contain the specified hash.
471 # Import hashes from the changes
472 rejmsg = parse_checksums(".changes", files, changes, hashname)
476 # We need to calculate the hash because it can't possibly
479 return func(".changes", files, hashname, hashfunc)
481 # We could add the orig which might be in the pool to the files dict to
482 # access the checksums easily.
# _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc): import the
# Checksums-<hash> field from the .dsc (if present) into dsc_files and
# verify it via check_hash(). NOTE(review): rejmsg setup/return not
# visible in this view.
484 def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
486 ensure_dsc_hashes' task is to ensure that each and every *present* hash
487 in the dsc is correct, i.e. identical to the changes file and if necessary
488 the pool. The latter task is delegated to check_hash.
# A missing Checksums-<hash> field is not an error — only *present*
# hashes are verified.
492 if not dsc.has_key('Checksums-%s' % (hashname,)):
494 # Import hashes from the dsc
495 parse_checksums(".dsc", dsc_files, dsc, hashname)
497 rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
500 ################################################################################
# parse_checksums(where, files, manifest, hashname): parse the
# "checksums-<hash>" field (lines of "checksum size filename") from the
# manifest into the files dict, cross-checking sizes and flagging files
# that have no checksum entry.
# NOTE(review): rejmsg setup, early-return and the try around the
# 3-way unpack are not visible in this view.
502 def parse_checksums(where, files, manifest, hashname):
504 field = 'checksums-%s' % hashname
505 if not field in manifest:
507 for line in manifest[field].split('\n'):
510 clist = line.strip().split(' ')
512 checksum, size, checkfile = clist
514 rejmsg.append("Cannot parse checksum line [%s]" % (line))
516 if not files.has_key(checkfile):
517 # TODO: check for the file's entry in the original files dict, not
518 # the one modified by (auto)byhand and other weird stuff
519 # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
520 # (file, hashname, where))
# NOTE(review): string-vs-string comparison — both sides appear to be
# strings here, unlike check_size which converts via int().
522 if not files[checkfile]["size"] == size:
523 rejmsg.append("%s: size differs for files and checksums-%s entry "\
524 "in %s" % (checkfile, hashname, where))
526 files[checkfile][hash_key(hashname)] = checksum
# Every file must have received a checksum entry by now.
527 for f in files.keys():
528 if not files[f].has_key(hash_key(hashname)):
529 rejmsg.append("%s: no entry in checksums-%s in %s" % (f, hashname, where))
532 ################################################################################
534 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
# build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
# Parse the whitespace-separated "Files"-style field into a dict keyed by
# filename, with size/section/priority/component/hash entries.
# NOTE(review): the files-dict initialisation, line-splitting into 's',
# and the return are not visible in this view.
536 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
539 # Make sure we have a Files: field to parse...
540 if not changes.has_key(field):
541 raise NoFilesFieldError
543 # Validate .changes Format: field
545 validate_changes_format(parse_format(changes['format']), field)
# .dsc Files entries have no section/priority columns.
547 includes_section = (not is_a_dsc) and field == "files"
549 # Parse each entry/line:
550 for i in changes[field].split('\n'):
554 section = priority = ""
# 5-tuple layout for .changes, 3-tuple for .dsc.
557 (md5, size, section, priority, name) = s
559 (md5, size, name) = s
561 raise ParseChangesError(i)
568 (section, component) = extract_component_from_section(section)
570 files[name] = dict(size=size, section=section,
571 priority=priority, component=component)
# The hash column is stored under the caller-chosen hashname key.
572 files[name][hashname] = md5
576 ################################################################################
578 # see http://bugs.debian.org/619131
# build_package_list(dsc, session=None): parse the .dsc "Package-List"
# field into a dict of per-package metadata; .dsc entries win over
# duplicates. NOTE(review): packages-dict initialisation, field unpacking
# for name/priority, and the return are not visible in this view.
579 def build_package_list(dsc, session = None):
580 if not dsc.has_key("package-list"):
585 for line in dsc["package-list"].split("\n"):
589 fields = line.split()
591 package_type = fields[1]
592 (section, component) = extract_component_from_section(fields[2])
595 # Validate type if we have a session
596 if session and get_override_type(package_type, session) is None:
597 # Maybe just warn and ignore? exit(1) might be a bit hard...
598 utils.fubar("invalid type (%s) in Package-List." % (package_type))
# Keep the first entry for a name unless the recorded one is the "dsc"
# pseudo-entry, which is always replaced.
600 if name not in packages or packages[name]["type"] == "dsc":
601 packages[name] = dict(priority=priority, section=section, type=package_type, component=component, files=[])
605 ################################################################################
# send_mail(message, filename=""): deliver mail via the configured
# sendmail command. Accepts either an in-memory message string (dumped to
# a tempfile) or a filename. Optionally filters recipients against
# Dinstall::MailWhiteList, rewriting/removing To/Bcc/Cc headers.
# NOTE(review): a large number of lines (whitelist init, match collection,
# tempfile bookkeeping, cleanup) are not visible in this view.
607 def send_mail (message, filename=""):
608 """sendmail wrapper, takes _either_ a message string or a file as arguments"""
610 # Check whether we're supposed to be sending mail
611 if Cnf.has_key("Dinstall::Options::No-Mail") and Cnf["Dinstall::Options::No-Mail"]:
614 # If we've been passed a string dump it into a temporary file
616 (fd, filename) = tempfile.mkstemp()
617 os.write (fd, message)
# Whitelist handling: only active when Dinstall::MailWhiteList is set.
620 if Cnf.has_key("Dinstall::MailWhiteList") and \
621 Cnf["Dinstall::MailWhiteList"] != "":
622 message_in = open_file(filename)
623 message_raw = modemail.message_from_file(message_in)
627 whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
629 for line in whitelist_in:
630 if not re_whitespace_comment.match(line):
# Lines marked with the RE marker become regex patterns; all other
# lines are matched literally (re.escape).
631 if re_re_mark.match(line):
632 whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
634 whitelist.append(re.compile(re.escape(line.strip())))
# Walk the recipient headers and keep only whitelisted addresses.
639 fields = ["To", "Bcc", "Cc"]
642 value = message_raw.get(field, None)
645 for item in value.split(","):
646 (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
652 if not mail_whitelisted:
653 print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])
657 # Doesn't have any mail in whitelist so remove the header
659 del message_raw[field]
661 message_raw.replace_header(field, ', '.join(match))
663 # Change message fields in order if we don't have a To header
664 if not message_raw.has_key("To"):
667 if message_raw.has_key(field):
668 message_raw[fields[-1]] = message_raw[field]
669 del message_raw[field]
672 # Clean up any temporary files
673 # and return, as we removed all recipients.
675 os.unlink (filename);
# Rewrite the (filtered) message back to the file before sending.
678 fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0o700);
679 os.write (fd, message_raw.as_string(True));
# Invoke sendmail; non-zero status raises SendmailFailedError.
683 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
685 raise SendmailFailedError(output)
687 # Clean up any temporary files
691 ################################################################################
# poolify(source, component): build the pool subdirectory path for a
# source package: "lib" packages use a 4-character prefix directory
# ("libf/"), everything else the first character ("f/").
# NOTE(review): component normalisation lines are not visible here —
# presumably component gets a trailing '/' appended when non-empty.
693 def poolify (source, component):
696 if source[:3] == "lib":
697 return component + source[:4] + '/' + source + '/'
699 return component + source[:1] + '/' + source + '/'
701 ################################################################################
# move(src, dest, overwrite=0, perms=0o664): copy src to dest (creating
# missing parent directories with mode 02775), chmod it, then remove the
# original (the removal line itself is not visible in this view).
# If dest is a directory, the src basename is appended. Refuses to
# overwrite existing/unwritable files unless forced.
703 def move (src, dest, overwrite = 0, perms = 0o664):
704 if os.path.exists(dest) and os.path.isdir(dest):
707 dest_dir = os.path.dirname(dest)
708 if not os.path.exists(dest_dir):
# Temporarily clear the umask so makedirs gets exactly mode 02775
# (setgid dir); NOTE(review): the umask-restore line is not visible.
709 umask = os.umask(00000)
710 os.makedirs(dest_dir, 0o2775)
712 #print "Moving %s to %s..." % (src, dest)
713 if os.path.exists(dest) and os.path.isdir(dest):
714 dest += '/' + os.path.basename(src)
715 # Don't overwrite unless forced to
716 if os.path.exists(dest):
718 fubar("Can't move %s to %s - file already exists." % (src, dest))
720 if not os.access(dest, os.W_OK):
721 fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
# copy2 preserves mtime/atime and permission bits before the chmod.
722 shutil.copy2(src, dest)
723 os.chmod(dest, perms)
# copy(src, dest, overwrite=0, perms=0o664): same behaviour as move()
# above but leaves src in place, and raises FileExistsError /
# CantOverwriteError instead of calling fubar().
726 def copy (src, dest, overwrite = 0, perms = 0o664):
727 if os.path.exists(dest) and os.path.isdir(dest):
730 dest_dir = os.path.dirname(dest)
731 if not os.path.exists(dest_dir):
# Clear umask so the created directory gets exactly mode 02775;
# NOTE(review): the umask-restore line is not visible in this view.
732 umask = os.umask(00000)
733 os.makedirs(dest_dir, 0o2775)
735 #print "Copying %s to %s..." % (src, dest)
736 if os.path.exists(dest) and os.path.isdir(dest):
737 dest += '/' + os.path.basename(src)
738 # Don't overwrite unless forced to
739 if os.path.exists(dest):
741 raise FileExistsError
743 if not os.access(dest, os.W_OK):
744 raise CantOverwriteError
745 shutil.copy2(src, dest)
746 os.chmod(dest, perms)
748 ################################################################################
# Body of a host-lookup helper (its `def` line is not visible here):
# returns the per-host Config::<fqdn>::DatabaseHostname setting when one
# is configured; the fallback return is not visible in this view.
751 res = socket.getfqdn()
752 database_hostname = Cnf.get("Config::" + res + "::DatabaseHostname")
753 if database_hostname:
754 return database_hostname
# which_conf_file(): resolve the dak config file to use, in precedence
# order: $DAK_CONFIG env var, a per-host local config (if allowed), the
# per-host Config::<fqdn>::DakConfig setting, then the module default.
758 def which_conf_file ():
759 if os.getenv('DAK_CONFIG'):
760 return os.getenv('DAK_CONFIG')
762 res = socket.getfqdn()
763 # In case we allow local config files per user, try if one exists
764 if Cnf.FindB("Config::" + res + "::AllowLocalConfig"):
765 homedir = os.getenv("HOME")
# NOTE(review): os.path.join(homedir, "/etc/dak.conf") discards homedir
# because the second component is absolute — this always yields
# "/etc/dak.conf". Looks like a latent bug; flagged, not changed here.
766 confpath = os.path.join(homedir, "/etc/dak.conf")
767 if os.path.exists(confpath):
768 apt_pkg.ReadConfigFileISC(Cnf,default_config)
770 # We are still in here, so there is no local config file or we do
771 # not allow local files. Do the normal stuff.
772 if Cnf.get("Config::" + res + "::DakConfig"):
773 return Cnf["Config::" + res + "::DakConfig"]
775 return default_config
# which_apt_conf_file(): like which_conf_file() above, but resolves the
# apt configuration (Config::<fqdn>::AptConfig, falling back to the
# module-level default_apt_config).
777 def which_apt_conf_file ():
778 res = socket.getfqdn()
779 # In case we allow local config files per user, try if one exists
780 if Cnf.FindB("Config::" + res + "::AllowLocalConfig"):
781 homedir = os.getenv("HOME")
# NOTE(review): same os.path.join absolute-component issue as in
# which_conf_file() — always resolves to "/etc/dak.conf".
782 confpath = os.path.join(homedir, "/etc/dak.conf")
783 if os.path.exists(confpath):
784 apt_pkg.ReadConfigFileISC(Cnf,default_config)
786 if Cnf.get("Config::" + res + "::AptConfig"):
787 return Cnf["Config::" + res + "::AptConfig"]
789 return default_apt_config
# which_alias_file(): return the host's forward-alias file path if it
# exists. NOTE(review): both return statements (path / None-equivalent)
# are not visible in this view.
791 def which_alias_file():
792 hostname = socket.getfqdn()
793 aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
794 if os.path.exists(aliasfn):
799 ################################################################################
# TemplateSubst(subst_map, filename): read a template file and replace
# every occurrence of each subst_map key with str(value).
# NOTE(review): the file close and the return of the substituted template
# are not visible in this view.
801 def TemplateSubst(subst_map, filename):
802 """ Perform a substition of template """
803 templatefile = open_file(filename)
804 template = templatefile.read()
# Plain string replacement — keys are literal markers, not regexes.
805 for k, v in subst_map.iteritems():
806 template = template.replace(k, str(v))
810 ################################################################################
# fubar(msg, exit_code=1): print a fatal "E:" message to stderr; the
# sys.exit(exit_code) call is not visible in this view but is implied by
# the exit_code parameter.
812 def fubar(msg, exit_code=1):
813 sys.stderr.write("E: %s\n" % (msg))
# Warning counterpart ("W:" prefix); its `def warn(...)` line is not
# visible in this view.
817 sys.stderr.write("W: %s\n" % (msg))
819 ################################################################################
821 # Returns the user name with a laughable attempt at rfc822 conformancy
822 # (read: removing stray periods).
# Full-name lookup from the GECOS field of the current user's passwd
# entry (its `def` line is not visible in this view).
824 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
# Login-name lookup for the current user (its `def` line is likewise not
# visible in this view).
827 return pwd.getpwuid(os.getuid())[0]
829 ################################################################################
# Tail of a human-readable size formatter (the def and the unit/count
# computation producing c and t are not visible in this view).
839 return ("%d%s" % (c, t))
841 ################################################################################
# cc_fix_changes(changes): convert the "architecture" field from a string
# into a dict mapping each architecture name to 1 (set-like membership),
# used by changes_compare() below.
# NOTE(review): the split of the old value into 'j' is not visible here.
843 def cc_fix_changes (changes):
844 o = changes.get("architecture", "")
846 del changes["architecture"]
847 changes["architecture"] = {}
849 changes["architecture"][j] = 1
# changes_compare(a, b): cmp-style comparator for .changes filenames.
# Parses both files and orders by source name, then version, then
# presence of source, finally falling back to filename comparison.
# NOTE(review): the except branches around parse_changes and the early
# returns on q != 0 are not visible in this view.
851 def changes_compare (a, b):
852 """ Sort by source name, source version, 'have source', and then by filename """
854 a_changes = parse_changes(a)
859 b_changes = parse_changes(b)
# Normalise the architecture fields into dicts before comparing.
863 cc_fix_changes (a_changes)
864 cc_fix_changes (b_changes)
866 # Sort by source name
867 a_source = a_changes.get("source")
868 b_source = b_changes.get("source")
869 q = cmp (a_source, b_source)
873 # Sort by source version
874 a_version = a_changes.get("version", "0")
875 b_version = b_changes.get("version", "0")
# Debian version comparison semantics, not plain string ordering.
876 q = apt_pkg.VersionCompare(a_version, b_version)
880 # Sort by 'have source'
881 a_has_source = a_changes["architecture"].get("source")
882 b_has_source = b_changes["architecture"].get("source")
883 if a_has_source and not b_has_source:
885 elif b_has_source and not a_has_source:
888 # Fall back to sort by filename
# find_next_free(dest, too_many=100): find a non-existing filename by
# appending ".<n>" suffixes to dest, giving up with NoFreeFilenameError
# after too_many attempts. NOTE(review): the extra-counter init/increment
# and the final return are not visible in this view.
893 def find_next_free (dest, too_many=100):
896 while os.path.exists(dest) and extra < too_many:
897 dest = orig_dest + '.' + repr(extra)
899 if extra >= too_many:
900 raise NoFreeFilenameError
903 ################################################################################
# result_join(original, sep='\t'): join a sequence with sep, rendering
# None entries as empty strings. NOTE(review): the resultlist init and
# the else branch are not visible in this view.
905 def result_join (original, sep = '\t'):
907 for i in xrange(len(original)):
908 if original[i] == None:
909 resultlist.append("")
911 resultlist.append(original[i])
912 return sep.join(resultlist)
914 ################################################################################
# prefix_multi_line_string(str, prefix, include_blank_lines=0): prepend
# prefix to every (optionally also blank) line of str; the trailing
# newline strip and return are not visible in this view.
# NOTE(review): parameter `str` shadows the builtin — kept as-is, it is
# part of the visible signature.
916 def prefix_multi_line_string(str, prefix, include_blank_lines=0):
918 for line in str.split('\n'):
920 if line or include_blank_lines:
921 out += "%s%s\n" % (prefix, line)
922 # Strip trailing new line
927 ################################################################################
# validate_changes_file_arg(filename, require_changes=1): normalise a
# .dak argument to its .changes twin and verify the .changes file exists
# and is readable; failure handling depends on require_changes (see the
# docstring below). NOTE(review): several branches/returns are not
# visible in this view.
929 def validate_changes_file_arg(filename, require_changes=1):
931 'filename' is either a .changes or .dak file. If 'filename' is a
932 .dak file, it's changed to be the corresponding .changes file. The
933 function then checks if the .changes file a) exists and b) is
934 readable and returns the .changes filename if so. If there's a
935 problem, the next action depends on the option 'require_changes'
938 - If 'require_changes' == -1, errors are ignored and the .changes
939 filename is returned.
940 - If 'require_changes' == 0, a warning is given and 'None' is returned.
941 - If 'require_changes' == 1, a fatal error is raised.
946 orig_filename = filename
947 if filename.endswith(".dak"):
948 filename = filename[:-4]+".changes"
950 if not filename.endswith(".changes"):
951 error = "invalid file type; not a changes file"
953 if not os.access(filename,os.R_OK):
954 if os.path.exists(filename):
955 error = "permission denied"
957 error = "file not found"
# Dispatch on require_changes per the policy documented above.
960 if require_changes == 1:
961 fubar("%s: %s." % (orig_filename, error))
962 elif require_changes == 0:
963 warn("Skipping %s - %s" % (orig_filename, error))
965 else: # We only care about the .dak file
970 ################################################################################
# Body of an architecture predicate (its `def` line is not visible):
# true only for concrete build architectures, i.e. not "source"/"all".
973 return (arch != "source" and arch != "all")
975 ################################################################################
def join_with_commas_and(list):
    """Render a list as a human-readable enumeration.

    [] -> "nothing", ["a"] -> "a", ["a", "b", "c"] -> "a, b and c".
    """
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    all_but_last = ", ".join(list[:-1])
    return all_but_last + " and " + list[-1]
982 ################################################################################
# Interior of a dependency pretty-printer (its `def` line and the
# pp_deps list init are not visible): formats each (pkg, version,
# constraint) atom as "pkg (constraint version)" and joins alternatives
# with " |".
987 (pkg, version, constraint) = atom
989 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
992 pp_deps.append(pp_dep)
993 return " |".join(pp_deps)
995 ################################################################################
1000 ################################################################################
# parse_args(Options): turn -s/-c/-a command-line options into SQL
# constraint fragments ("AND su.id IN (...)" etc.) plus a check_source
# flag. NOTE(review): several list initialisations, the check_source
# assignment and empty-option branches are not visible in this view.
1002 def parse_args(Options):
1003 """ Handle -a, -c and -s arguments; returns them as SQL constraints """
1004 # XXX: This should go away and everything which calls it be converted
1005 # to use SQLA properly. For now, we'll just fix it not to use
1006 # the old Pg interface though
1007 session = DBConn().session()
# Suites -> con_suites
1009 if Options["Suite"]:
1011 for suitename in split_args(Options["Suite"]):
1012 suite = get_suite(suitename, session=session)
1013 if not suite or suite.suite_id is None:
1014 warn("suite '%s' not recognised." % (suite and suite.suite_name or suitename))
1016 suite_ids_list.append(suite.suite_id)
1018 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
1020 fubar("No valid suite given.")
# Components -> con_components
1025 if Options["Component"]:
1026 component_ids_list = []
1027 for componentname in split_args(Options["Component"]):
1028 component = get_component(componentname, session=session)
1029 if component is None:
1030 warn("component '%s' not recognised." % (componentname))
1032 component_ids_list.append(component.component_id)
1033 if component_ids_list:
1034 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
1036 fubar("No valid component given.")
1040 # Process architecture
1041 con_architectures = ""
1043 if Options["Architecture"]:
1045 for archname in split_args(Options["Architecture"]):
# "source" is handled via check_source rather than an arch id.
1046 if archname == "source":
1049 arch = get_architecture(archname, session=session)
1051 warn("architecture '%s' not recognised." % (archname))
1053 arch_ids_list.append(arch.arch_id)
1055 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
1057 if not check_source:
1058 fubar("No valid architecture given.")
1062 return (con_suites, con_architectures, con_components, check_source)
1064 ################################################################################
1066 # Inspired(tm) by Bryn Keller's print_exc_plus (See
1067 # http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/52215)
# Extended traceback printer: walks to the innermost frame, prints the
# normal traceback, then dumps each frame's local variables.
# NOTE(review): the `def` line, frame-walk loop header and per-frame loop
# are not fully visible in this view.
1070 tb = sys.exc_info()[2]
1077 frame = frame.f_back
1079 traceback.print_exc()
1081 print "\nFrame %s in %s at line %s" % (frame.f_code.co_name,
1082 frame.f_code.co_filename,
1084 for key, value in frame.f_locals.items():
1085 print "\t%20s = " % key,
# Some locals cannot be repr()ed safely; fall back to a placeholder.
1089 print "<unable to print>"
1091 ################################################################################
# try_with_debug(function): wrapper that runs `function` and, on failure,
# presumably invokes the extended traceback printer above — the entire
# body is not visible in this view; TODO confirm against the full file.
1093 def try_with_debug(function):
1101 ################################################################################
# arch_compare_sw(a, b): cmp-style architecture comparator where "source"
# sorts before everything else (per the docstring); only the
# both-"source" branch is visible in this view.
1103 def arch_compare_sw (a, b):
1105 Function for use in sorting lists of architectures.
1107 Sorts normally except that 'source' dominates all others.
1110 if a == "source" and b == "source":
1119 ################################################################################
# split_args(s, dwim=1): split a comma- or whitespace-separated argument
# string into a list; with dwim set, a trailing comma is treated as a
# fatal usage error. NOTE(review): the actual split/return statements
# are not visible in this view.
1121 def split_args (s, dwim=1):
1123 Split command line arguments which can be separated by either commas
1124 or whitespace. If dwim is set, it will complain about string ending
1125 in comma since this usually means someone did 'dak ls -a i386, m68k
1126 foo' or something and the inevitable confusion resulting from 'm68k'
1127 being treated as an argument is undesirable.
# No comma at all -> whitespace splitting (presumably; split not visible).
1130 if s.find(",") == -1:
1133 if s[-1:] == "," and dwim:
1134 fubar("split_args: found trailing comma, spurious space maybe?")
1137 ################################################################################
# gpgv_get_status_output(cmd, status_read, status_write): fork/exec a
# shell command (gpgv) while multiplexing its stdout/stderr and a
# dedicated status fd via select(); returns (output, status, exit_status).
# NOTE(review): the fork, many dup2/close calls, the read-accumulation
# branches and fd cleanup are not visible in this view — do not infer
# the exact fd plumbing from what is shown.
1139 def gpgv_get_status_output(cmd, status_read, status_write):
1141 Our very own version of commands.getouputstatus(), hacked to support
1145 cmd = ['/bin/sh', '-c', cmd]
1146 p2cread, p2cwrite = os.pipe()
1147 c2pread, c2pwrite = os.pipe()
1148 errout, errin = os.pipe()
# Child: close every inherited fd except the status pipe, then exec.
1158 for i in range(3, 256):
1159 if i != status_write:
1165 os.execvp(cmd[0], cmd)
# Parent: route child stdout/stderr into the fds we select() on.
1171 os.dup2(c2pread, c2pwrite)
1172 os.dup2(errout, errin)
1174 output = status = ""
1176 i, o, e = select.select([c2pwrite, errin, status_read], [], [])
1179 r = os.read(fd, 8196)
1181 more_data.append(fd)
1182 if fd == c2pwrite or fd == errin:
1184 elif fd == status_read:
1187 fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
# Reap the child and collect its exit status.
1189 pid, exit_status = os.waitpid(pid, 0)
1191 os.close(status_write)
1192 os.close(status_read)
1202 return output, status, exit_status
1204 ################################################################################
# process_gpgv_output(status): parse gpgv --status-fd output lines of the
# form "[GNUPG:] KEYWORD args..." into a keyword -> args dict, collecting
# malformed lines into an internal_error string. Returns
# (keywords, internal_error).
# NOTE(review): keywords/internal_error initialisation and the 'args'
# slicing are not visible in this view.
1206 def process_gpgv_output(status):
1207 # Process the status-fd output
1210 for line in status.split('\n'):
1214 split = line.split()
1216 internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
1218 (gnupg, keyword) = split[:2]
1219 if gnupg != "[GNUPG:]":
1220 internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
# Some status tokens may legitimately repeat; everything else repeating
# is treated as an error.
1223 if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
1224 internal_error += "found duplicate status token ('%s').\n" % (keyword)
1227 keywords[keyword] = args
1229 return (keywords, internal_error)
1231 ################################################################################
# retrieve_key(filename, keyserver=None, keyring=None): determine which
# key signed `filename` (by running gpgv and reading its NO_PUBKEY
# status token), then fetch that key from the keyserver into the given
# keyring. Returns an error string on failure.
# NOTE(review): default-argument branches, the internal_error test and
# the success return are not visible in this view.
1233 def retrieve_key (filename, keyserver=None, keyring=None):
1235 Retrieve the key that signed 'filename' from 'keyserver' and
1236 add it to 'keyring'. Returns nothing on success, or an error message
1240 # Defaults for keyserver and keyring
1242 keyserver = Cnf["Dinstall::KeyServer"]
1244 keyring = get_primary_keyring_path()
1246 # Ensure the filename contains no shell meta-characters or other badness
# The filename is interpolated into a shell command below, so reject
# anything re_taint_free does not approve.
1247 if not re_taint_free.match(filename):
1248 return "%s: tainted filename" % (filename)
1250 # Invoke gpgv on the file
1251 status_read, status_write = os.pipe()
# /dev/null keyring: we *expect* verification to fail with NO_PUBKEY,
# which tells us the fingerprint to fetch.
1252 cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1253 (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1255 # Process the status-fd output
1256 (keywords, internal_error) = process_gpgv_output(status)
1258 return internal_error
1260 if not keywords.has_key("NO_PUBKEY"):
1261 return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1263 fingerprint = keywords["NO_PUBKEY"][0]
1264 # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
1265 # it'll try to create a lockfile in /dev. A better solution might
1266 # be a tempfile or something.
1267 cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1268 % (Cnf["Dinstall::SigningKeyring"])
1269 cmd += " --keyring %s --keyserver %s --recv-key %s" \
1270 % (keyring, keyserver, fingerprint)
1271 (result, output) = commands.getstatusoutput(cmd)
1273 return "'%s' failed with exit code %s" % (cmd, result)
1277 ################################################################################
def gpg_keyring_args(keyrings=None):
    """Build a gpg/gpgv argument string naming each keyring.

    Returns '--keyring <path>' for every entry of 'keyrings', joined with
    spaces, suitable for splicing into a command line.
    """
    # NOTE(review): an `if not keyrings:` guard is elided in this excerpt;
    # as written the caller's argument is always replaced by the active
    # keyring paths -- confirm against the full source.
    keyrings = get_active_keyring_paths()

    return " ".join(["--keyring %s" % x for x in keyrings])
1285 ################################################################################
def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None, session=None):
    """
    Check the signature of a file and return the fingerprint if the
    signature is valid or 'None' if it's not.  The first argument is the
    filename whose signature should be checked.  The second argument is a
    reject function and is called when an error is found.  The reject()
    function must allow for two arguments: the first is the error message,
    the second is an optional prefix string.  It's possible for reject()
    to be called more than once during an invocation of check_signature().
    The third argument is optional and is the name of the files the
    detached signature applies to.  The fourth argument is optional and is
    a *list* of keyrings to use.  'autofetch' can either be None, True or
    False.  If None, the default behaviour specified in the config will be
    used.

    Returns a tuple (fingerprint-or-None, list-of-reject-messages).
    NOTE(review): the paragraph above still describes a reject() callback,
    but the code accumulates messages in a local 'rejects' list and returns
    it -- the docstring looks stale; confirm and update upstream.
    """
    # NOTE(review): the `rejects = []` initialisation is elided in this excerpt.

    # Ensure the filename contains no shell meta-characters or other badness
    # (both names end up on a gpgv command line below).
    if not re_taint_free.match(sig_filename):
        rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
        return (None, rejects)

    if data_filename and not re_taint_free.match(data_filename):
        rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
        return (None, rejects)

    # Default to every keyring currently marked active in the database.
    # NOTE(review): an `if not keyrings:` guard is elided in this excerpt.
    keyrings = [ x.keyring_name for x in session.query(Keyring).filter(Keyring.active == True).all() ]

    # Autofetch the signing key if that's enabled
    if autofetch == None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    # NOTE(review): `if autofetch:` and `if error_msg:` guards are elided in
    # this excerpt; as written this always returns here.
    error_msg = retrieve_key(sig_filename)
    rejects.append(error_msg)
    return (None, rejects)

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    # NOTE(review): an `if internal_error:` guard is elided in this excerpt.
    rejects.append("internal error while performing signature check on %s." % (sig_filename))
    # BUG(review): list.append() takes exactly one argument -- the two calls
    # below raise TypeError at runtime.  They look like leftovers from the
    # old reject(msg, prefix) callback API; should be
    # rejects.append(internal_error) / rejects.append("Please report ...").
    rejects.append(internal_error, "")
    rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.", "")
    return (None, rejects)

    # Now check for obviously bad things in the processed output
    if keywords.has_key("KEYREVOKED"):
        rejects.append("The key used to sign %s has been revoked." % (sig_filename))
    if keywords.has_key("BADSIG"):
        rejects.append("bad signature on %s." % (sig_filename))
    if keywords.has_key("ERRSIG") and not keywords.has_key("NO_PUBKEY"):
        rejects.append("failed to check signature on %s." % (sig_filename))
    if keywords.has_key("NO_PUBKEY"):
        args = keywords["NO_PUBKEY"]
        # NOTE(review): the extraction of `key` from args (with its length
        # check) is elided in this excerpt.
        rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if keywords.has_key("BADARMOR"):
        rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if keywords.has_key("NODATA"):
        rejects.append("no signature found in %s." % (sig_filename))
    if keywords.has_key("EXPKEYSIG"):
        args = keywords["EXPKEYSIG"]
        # NOTE(review): `key = args[0]`-style extraction is elided here too.
        rejects.append("Signature made by expired key 0x%s" % (key))
    if keywords.has_key("KEYEXPIRED") and not keywords.has_key("GOODSIG"):
        args = keywords["KEYEXPIRED"]
        # NOTE(review): the extraction of `timestamp` from args, and the
        # try/except ValueError + else structure around the three
        # `expiredate` assignments below, are elided in this excerpt --
        # only one of the three ran in the original.
        if timestamp.count("T") == 0:
            # Numeric epoch timestamp -> human-readable date (UTC).
            expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
            # Fallback when the timestamp does not parse as a float.
            expiredate = "unknown (%s)" % (timestamp)
        # ISO-8601 style timestamps are passed through verbatim.
        expiredate = timestamp
        rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))

    if len(rejects) > 0:
        return (None, rejects)

    # Next check gpgv exited with a zero return code
    # NOTE(review): an `if exit_status:` guard (and inner guards on
    # status/output being non-empty) are elided in this excerpt.
    rejects.append("gpgv failed while checking %s." % (sig_filename))
    rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "))
    rejects.append(prefix_multi_line_string(output, " [GPG output:] "))
    return (None, rejects)

    # Sanity check the good stuff we expect
    if not keywords.has_key("VALIDSIG"):
        rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
    # NOTE(review): the else-branch structure around VALIDSIG handling is
    # elided; the args/fingerprint extraction below belongs under it, with
    # a length check guarding the "internal error" message.
    args = keywords["VALIDSIG"]
    rejects.append("internal error while checking signature on %s." % (sig_filename))
    # First VALIDSIG token is the full key fingerprint.
    fingerprint = args[0]
    if not keywords.has_key("GOODSIG"):
        rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if not keywords.has_key("SIG_ID"):
        rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

    # Finally ensure there's not something we don't recognise
    known_keywords = dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
                          NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="",POLICY_URL="")

    for keyword in keywords.keys():
        if not known_keywords.has_key(keyword):
            rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))

    if len(rejects) > 0:
        return (None, rejects)

    return (fingerprint, [])
1419 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """retrieve email addresses from gpg key uids for a given fingerprint"""
    # Serve repeated lookups from the module-level cache.
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses != None:
        # NOTE(review): restored from an elided line -- cache hits
        # short-circuit here in the full source.
        return addresses
    # NOTE(review): an `addresses = set()` initialisation is elided in this
    # excerpt; without it the .add() below cannot work.
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
        % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    # NOTE(review): an `if result == 0:` guard around this parsing appears
    # to be elided -- confirm against the full source.
    for l in output.split('\n'):
        m = re_gpg_uid.match(l)
        # NOTE(review): an `if m:` guard is elided -- as written this raises
        # AttributeError on lines that do not match re_gpg_uid.
        addresses.add(m.group(1))
    # Memoize for subsequent lookups of the same fingerprint.
    key_uid_email_cache[fingerprint] = addresses
    # NOTE(review): the final `return addresses` is elided in this excerpt.
1438 ################################################################################
1440 # Inspired(tm) by http://www.zopelabs.com/cookbook/1022242603
def wrap(paragraph, max_length, prefix=""):
    """Word-wrap 'paragraph' to at most 'max_length' columns, prefixing
    continuation lines with 'prefix'.

    NOTE(review): large parts of this function are elided in this excerpt
    (accumulator setup for `s`/`line`, the `for word in words:` loop
    header, else-branches, and the final return); the comments below mark
    the gaps.
    """
    words = paragraph.split()
    # NOTE(review): loop header `for word in words:` is elided here.
    word_size = len(word)
    if word_size > max_length:
        # A word longer than the wrap width is flushed onto its own line.
        s += line + '\n' + prefix
        s += word + '\n' + prefix
    # NOTE(review): the else-branch structure around the following is elided.
    new_length = len(line) + word_size + 1
    if new_length > max_length:
        # Current line would overflow; flush it and start a new one.
        s += line + '\n' + prefix
1471 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
    Returns the fixed (relative) 'src'.
    """
    # Strip the archive root off both endpoints (first occurrence only).
    rel_src = src.replace(root, '', 1)
    rel_dest_dir = os.path.dirname(dest.replace(root, '', 1))
    # One '../' per path component of the link's directory climbs back to
    # the root, from where the relative source path applies.
    depth = len(rel_dest_dir.split('/'))
    return ('../' * depth) + rel_src
1484 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.
    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pair (fd, name).
    """
    # mkstemp already creates the file securely (0600, O_EXCL); we just map
    # our keyword names onto its (suffix, prefix, dir) parameters.
    fd, path = tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=directory)
    return (fd, path)
1498 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Return a secure and unique directory by pre-creating it.
    If 'parent' is non-null, it will be the directory the directory is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pathname to the new directory
    """
    # mkdtemp creates the directory with mode 0700; map our keyword names
    # onto its (suffix, prefix, dir) parameters.
    path = tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=parent)
    return path
1512 ################################################################################
def is_email_alias(email):
    """ checks if the user part of the email is listed in the alias file """
    # NOTE(review): a `global alias_cache` statement and the
    # `alias_cache = set()` initialisation are elided in this excerpt.
    if alias_cache == None:
        aliasfn = which_alias_file()
        # NOTE(review): an `if aliasfn:` guard appears to be elided here --
        # as written a missing alias file makes open() fail.
        # Alias file format is "name: target"; only the name part matters.
        for l in open(aliasfn):
            alias_cache.add(l.split(':')[0])
    # Compare only the local part of the address against the alias names.
    uid = email.split('@')[0]
    return uid in alias_cache
1526 ################################################################################
def get_changes_files(from_dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place)

    Returns a list of filenames
    """
    # NOTE(review): `try:` restored for well-formedness; the
    # `os.chdir(from_dir)` line documented above is elided in this excerpt.
    try:
        # Much of the rest of p-u/p-a depends on being in the right place
        changes_files = [x for x in os.listdir(from_dir) if x.endswith('.changes')]
    except OSError as e:
        # fubar() prints the message and exits the program.
        fubar("Failed to read list from directory %s (%s)" % (from_dir, e))

    return changes_files
1545 ################################################################################
# Load the dak configuration at import time so module-level helpers can
# consult Cnf.
# NOTE(review): an `apt_pkg.init()` call preceding this appears to be
# elided in this excerpt -- confirm against the full source.
Cnf = apt_pkg.newConfiguration()
# The DAK_TEST environment variable lets the test suite skip reading the
# real host configuration.
if not os.getenv("DAK_TEST"):
    apt_pkg.ReadConfigFileISC(Cnf,default_config)

# Layer a host-specific configuration file on top, if one is in use.
if which_conf_file() != default_config:
    apt_pkg.ReadConfigFileISC(Cnf,which_conf_file())
1556 ################################################################################
def parse_wnpp_bug_file(file = "/srv/ftp-master.debian.org/scripts/masterfiles/wnpp_rm"):
    """
    Parses the wnpp bug list available at http://qa.debian.org/data/bts/wnpp_rm
    Well, actually it parsed a local copy, but let's document the source
    anyway.

    returns a dict associating source package name with a list of open wnpp
    bugs (Yes, there might be more than one)
    """
    # NOTE(review): the `wnpp = {}` initialisation and the `f = open(file)`
    # call are elided in this excerpt; `try:` restored for well-formedness.
    try:
        lines = f.readlines()
    except IOError as e:
        # Best-effort: a missing file just means no bugs get closed.
        print "Warning: Couldn't open %s; don't know about WNPP bugs, so won't close any." % file
    # NOTE(review): the `for line in lines:` loop header is elided here.
    # Each line looks like "srcpkg: bug-entry|bug-entry|...".
    splited_line = line.split(": ", 1)
    if len(splited_line) > 1:
        wnpp[splited_line[0]] = splited_line[1].split("|")

    for source in wnpp.keys():
        # NOTE(review): a `bugs = []` accumulator around this inner loop
        # appears to be elided.
        for wnpp_bug in wnpp[source]:
            # .group() (no index) returns the whole numeric match.
            bug_no = re.search("(\d)+", wnpp_bug).group()
    # NOTE(review): the function tail (collecting bug numbers per source
    # and the final `return wnpp`) is elided in this excerpt.
1591 ################################################################################
1593 def get_packages_from_ftp(root, suite, component, architecture):
1595 Returns an object containing apt_pkg-parseable data collected by
1596 aggregating Packages.gz files gathered for each architecture.
1599 @param root: path to ftp archive root directory
1602 @param suite: suite to extract files from
1604 @type component: string
1605 @param component: component to extract files from
1607 @type architecture: string
1608 @param architecture: architecture to extract files from
1611 @return: apt_pkg class containing package data
1614 filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (root, suite, component, architecture)
1615 (fd, temp_file) = temp_filename()
1616 (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_file))
1618 fubar("Gunzip invocation failed!\n%s\n" % (output), result)
1619 filename = "%s/dists/%s/%s/debian-installer/binary-%s/Packages.gz" % (root, suite, component, architecture)
1620 if os.path.exists(filename):
1621 (result, output) = commands.getstatusoutput("gunzip -c %s >> %s" % (filename, temp_file))
1623 fubar("Gunzip invocation failed!\n%s\n" % (output), result)
1624 packages = open_file(temp_file)
1625 Packages = apt_pkg.ParseTagFile(packages)
1626 os.unlink(temp_file)