2 # vim:set et ts=4 sw=4:
6 @contact: Debian FTP Master <ftpmaster@debian.org>
7 @copyright: 2000, 2001, 2002, 2003, 2004, 2005, 2006 James Troup <james@nocrew.org>
8 @license: GNU General Public License version 2 or later
11 # This program is free software; you can redistribute it and/or modify
12 # it under the terms of the GNU General Public License as published by
13 # the Free Software Foundation; either version 2 of the License, or
14 # (at your option) any later version.
16 # This program is distributed in the hope that it will be useful,
17 # but WITHOUT ANY WARRANTY; without even the implied warranty of
18 # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
19 # GNU General Public License for more details.
21 # You should have received a copy of the GNU General Public License
22 # along with this program; if not, write to the Free Software
23 # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
41 import email as modemail
44 from dbconn import DBConn, get_architecture, get_component, get_suite, \
45 get_override_type, Keyring, session_wrapper, \
46 get_active_keyring_paths, get_primary_keyring_path
47 from sqlalchemy import desc
48 from dak_exceptions import *
49 from gpg import SignedFile
50 from textutils import fix_maintainer
51 from regexes import re_html_escaping, html_escaping, re_single_line_field, \
52 re_multi_line_field, re_srchasver, re_taint_free, \
53 re_gpg_uid, re_re_mark, re_whitespace_comment, re_issource, \
56 from formats import parse_format, validate_changes_format
57 from srcformats import get_format_from_string
58 from collections import defaultdict
60 ################################################################################
62 default_config = "/etc/dak/dak.conf" #: default dak config, defines host properties
63 default_apt_config = "/etc/dak/apt.conf" #: default apt config, not normally used
65 alias_cache = None #: Cache for email alias checks
66 key_uid_email_cache = {} #: Cache for email addresses from gpg key uids
68 # (hashname, function, earliest_changes_version)
69 known_hashes = [("sha1", apt_pkg.sha1sum, (1, 8)),
70 ("sha256", apt_pkg.sha256sum, (1, 8))] #: hashes we accept for entries in .changes/.dsc
72 # Monkeypatch commands.getstatusoutput as it may not return the correct exit
73 # code in lenny's Python. This also affects commands.getoutput and
def dak_getstatusoutput(cmd):
    # Replacement for commands.getstatusoutput (monkeypatched below): run
    # cmd through the shell with stderr merged into stdout.
    # NOTE(review): shell=True — cmd must come from trusted/pre-sanitised
    # callers only.
    pipe = subprocess.Popen(cmd, shell=True, universal_newlines=True,
        stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

    # Read the child's entire (merged) output in one go.
    output = pipe.stdout.read()

    # Mirror commands.getstatusoutput behaviour of stripping one trailing
    # newline from the captured output.
    if output[-1:] == '\n':
91 commands.getstatusoutput = dak_getstatusoutput
93 ################################################################################
96 """ Escape html chars """
97 return re_html_escaping.sub(lambda x: html_escaping.get(x.group(0)), s)
99 ################################################################################
def open_file(filename, mode='r'):
    """
    Open C{file}, return fileobject.

    @type filename: string
    @param filename: path/filename to open

    @type mode: string
    @param mode: open mode

    @rtype: fileobject
    @return: open fileobject

    @raise CantOpenError: If IOError is raised by open, reraise it as CantOpenError.
    """
    try:
        f = open(filename, mode)
    except IOError:
        # Re-raise as the dak-specific exception so callers can treat
        # "could not open" uniformly, as documented above.
        raise CantOpenError(filename)
    return f
123 ################################################################################
125 def our_raw_input(prompt=""):
129 sys.stdout.write(prompt)
138 sys.stderr.write("\nUser interrupt (^D).\n")
141 ################################################################################
143 def extract_component_from_section(section, session=None):
146 if section.find('/') != -1:
147 component = section.split('/')[0]
149 # Expand default component
151 comp = get_component(section, session)
155 component = comp.component_name
157 return (section, component)
159 ################################################################################
161 def parse_deb822(armored_contents, signing_rules=0, keyrings=None, session=None):
162 require_signature = True
165 require_signature = False
167 signed_file = SignedFile(armored_contents, keyrings=keyrings, require_signature=require_signature)
168 contents = signed_file.contents
173 # Split the lines in the input, keeping the linebreaks.
174 lines = contents.splitlines(True)
177 raise ParseChangesError("[Empty changes file]")
179 # Reindex by line number so we can easily verify the format of
185 indexed_lines[index] = line[:-1]
187 num_of_lines = len(indexed_lines.keys())
190 while index < num_of_lines:
192 line = indexed_lines[index]
193 if line == "" and signing_rules == 1:
194 if index != num_of_lines:
195 raise InvalidDscError(index)
197 slf = re_single_line_field.match(line)
199 field = slf.groups()[0].lower()
200 changes[field] = slf.groups()[1]
204 changes[field] += '\n'
206 mlf = re_multi_line_field.match(line)
209 raise ParseChangesError("'%s'\n [Multi-line field continuing on from nothing?]" % (line))
210 if first == 1 and changes[field] != "":
211 changes[field] += '\n'
213 changes[field] += mlf.groups()[0] + '\n'
217 changes["filecontents"] = armored_contents
219 if changes.has_key("source"):
220 # Strip the source version in brackets from the source field,
221 # put it in the "source-version" field instead.
222 srcver = re_srchasver.search(changes["source"])
224 changes["source"] = srcver.group(1)
225 changes["source-version"] = srcver.group(2)
228 raise ParseChangesError(error)
232 ################################################################################
234 def parse_changes(filename, signing_rules=0, dsc_file=0, keyrings=None):
236 Parses a changes file and returns a dictionary where each field is a
237 key. The mandatory first argument is the filename of the .changes
240 signing_rules is an optional argument:
242 - If signing_rules == -1, no signature is required.
243 - If signing_rules == 0 (the default), a signature is required.
244 - If signing_rules == 1, it turns on the same strict format checking
247 The rules for (signing_rules == 1)-mode are:
249 - The PGP header consists of "-----BEGIN PGP SIGNED MESSAGE-----"
250 followed by any PGP header data and must end with a blank line.
252 - The data section must end with a blank line and must be followed by
253 "-----BEGIN PGP SIGNATURE-----".
256 changes_in = open_file(filename)
257 content = changes_in.read()
260 unicode(content, 'utf-8')
262 raise ChangesUnicodeError("Changes file not proper utf-8")
263 changes = parse_deb822(content, signing_rules, keyrings=keyrings)
267 # Finally ensure that everything needed for .changes is there
268 must_keywords = ('Format', 'Date', 'Source', 'Binary', 'Architecture', 'Version',
269 'Distribution', 'Maintainer', 'Description', 'Changes', 'Files')
272 for keyword in must_keywords:
273 if not changes.has_key(keyword.lower()):
274 missingfields.append(keyword)
276 if len(missingfields):
277 raise ParseChangesError("Missing mandantory field(s) in changes file (policy 5.5): %s" % (missingfields))
281 ################################################################################
def hash_key(hashname):
    """Return the files-dict key under which the *hashname* checksum is stored."""
    return hashname + 'sum'
286 ################################################################################
288 def create_hash(where, files, hashname, hashfunc):
290 create_hash extends the passed files dict with the given hash by
291 iterating over all files on disk and passing them to the hashing
296 for f in files.keys():
298 file_handle = open_file(f)
299 except CantOpenError:
300 rejmsg.append("Could not open file %s for checksumming" % (f))
303 files[f][hash_key(hashname)] = hashfunc(file_handle)
308 ################################################################################
310 def check_hash(where, files, hashname, hashfunc):
312 check_hash checks the given hash in the files dict against the actual
313 files on disk. The hash values need to be present consistently in
314 all file entries. It does not modify its input in any way.
318 for f in files.keys():
322 file_handle = open_file(f)
324 # Check for the hash entry, to not trigger a KeyError.
325 if not files[f].has_key(hash_key(hashname)):
326 rejmsg.append("%s: misses %s checksum in %s" % (f, hashname,
330 # Actually check the hash for correctness.
331 if hashfunc(file_handle) != files[f][hash_key(hashname)]:
332 rejmsg.append("%s: %s check failed in %s" % (f, hashname,
334 except CantOpenError:
335 # TODO: This happens when the file is in the pool.
336 # warn("Cannot open file %s" % f)
343 ################################################################################
345 def check_size(where, files):
347 check_size checks the file sizes in the passed files dict against the
352 for f in files.keys():
355 except OSError as exc:
357 # TODO: This happens when the file is in the pool.
361 actual_size = entry[stat.ST_SIZE]
362 size = int(files[f]["size"])
363 if size != actual_size:
364 rejmsg.append("%s: actual file size (%s) does not match size (%s) in %s"
365 % (f, actual_size, size, where))
368 ################################################################################
370 def check_dsc_files(dsc_filename, dsc=None, dsc_files=None):
372 Verify that the files listed in the Files field of the .dsc are
373 those expected given the announced Format.
375 @type dsc_filename: string
376 @param dsc_filename: path of .dsc file
379 @param dsc: the content of the .dsc parsed by C{parse_changes()}
381 @type dsc_files: dict
382 @param dsc_files: the file list returned by C{build_file_list()}
385 @return: all errors detected
389 # Parse the file if needed
391 dsc = parse_changes(dsc_filename, signing_rules=1, dsc_file=1);
393 if dsc_files is None:
394 dsc_files = build_file_list(dsc, is_a_dsc=1)
396 # Ensure .dsc lists proper set of source files according to the format
398 has = defaultdict(lambda: 0)
401 (r'orig.tar.gz', ('orig_tar_gz', 'orig_tar')),
402 (r'diff.gz', ('debian_diff',)),
403 (r'tar.gz', ('native_tar_gz', 'native_tar')),
404 (r'debian\.tar\.(gz|bz2|xz)', ('debian_tar',)),
405 (r'orig\.tar\.(gz|bz2|xz)', ('orig_tar',)),
406 (r'tar\.(gz|bz2|xz)', ('native_tar',)),
407 (r'orig-.+\.tar\.(gz|bz2|xz)', ('more_orig_tar',)),
410 for f in dsc_files.keys():
411 m = re_issource.match(f)
413 rejmsg.append("%s: %s in Files field not recognised as source."
417 # Populate 'has' dictionary by resolving keys in lookup table
419 for regex, keys in ftype_lookup:
420 if re.match(regex, m.group(3)):
426 # File does not match anything in lookup table; reject
428 reject("%s: unexpected source file '%s'" % (dsc_filename, f))
430 # Check for multiple files
431 for file_type in ('orig_tar', 'native_tar', 'debian_tar', 'debian_diff'):
432 if has[file_type] > 1:
433 rejmsg.append("%s: lists multiple %s" % (dsc_filename, file_type))
435 # Source format specific tests
437 format = get_format_from_string(dsc['format'])
439 '%s: %s' % (dsc_filename, x) for x in format.reject_msgs(has)
442 except UnknownFormatError:
443 # Not an error here for now
448 ################################################################################
450 def check_hash_fields(what, manifest):
452 check_hash_fields ensures that there are no checksum fields in the
453 given dict that we do not know about.
457 hashes = map(lambda x: x[0], known_hashes)
458 for field in manifest:
459 if field.startswith("checksums-"):
460 hashname = field.split("-",1)[1]
461 if hashname not in hashes:
462 rejmsg.append("Unsupported checksum field for %s "\
463 "in %s" % (hashname, what))
466 ################################################################################
468 def _ensure_changes_hash(changes, format, version, files, hashname, hashfunc):
469 if format >= version:
470 # The version should contain the specified hash.
473 # Import hashes from the changes
474 rejmsg = parse_checksums(".changes", files, changes, hashname)
478 # We need to calculate the hash because it can't possibly
481 return func(".changes", files, hashname, hashfunc)
483 # We could add the orig which might be in the pool to the files dict to
484 # access the checksums easily.
486 def _ensure_dsc_hash(dsc, dsc_files, hashname, hashfunc):
488 ensure_dsc_hashes' task is to ensure that each and every *present* hash
489 in the dsc is correct, i.e. identical to the changes file and if necessary
490 the pool. The latter task is delegated to check_hash.
494 if not dsc.has_key('Checksums-%s' % (hashname,)):
496 # Import hashes from the dsc
497 parse_checksums(".dsc", dsc_files, dsc, hashname)
499 rejmsg.extend(check_hash(".dsc", dsc_files, hashname, hashfunc))
502 ################################################################################
504 def parse_checksums(where, files, manifest, hashname):
506 field = 'checksums-%s' % hashname
507 if not field in manifest:
509 for line in manifest[field].split('\n'):
512 clist = line.strip().split(' ')
514 checksum, size, checkfile = clist
516 rejmsg.append("Cannot parse checksum line [%s]" % (line))
518 if not files.has_key(checkfile):
519 # TODO: check for the file's entry in the original files dict, not
520 # the one modified by (auto)byhand and other weird stuff
521 # rejmsg.append("%s: not present in files but in checksums-%s in %s" %
522 # (file, hashname, where))
524 if not files[checkfile]["size"] == size:
525 rejmsg.append("%s: size differs for files and checksums-%s entry "\
526 "in %s" % (checkfile, hashname, where))
528 files[checkfile][hash_key(hashname)] = checksum
529 for f in files.keys():
530 if not files[f].has_key(hash_key(hashname)):
531 rejmsg.append("%s: no entry in checksums-%s in %s" % (f, hashname, where))
534 ################################################################################
536 # Dropped support for 1.4 and ``buggy dchanges 3.4'' (?!) compared to di.pl
538 def build_file_list(changes, is_a_dsc=0, field="files", hashname="md5sum"):
541 # Make sure we have a Files: field to parse...
542 if not changes.has_key(field):
543 raise NoFilesFieldError
545 # Validate .changes Format: field
547 validate_changes_format(parse_format(changes['format']), field)
549 includes_section = (not is_a_dsc) and field == "files"
551 # Parse each entry/line:
552 for i in changes[field].split('\n'):
556 section = priority = ""
559 (md5, size, section, priority, name) = s
561 (md5, size, name) = s
563 raise ParseChangesError(i)
570 (section, component) = extract_component_from_section(section)
572 files[name] = dict(size=size, section=section,
573 priority=priority, component=component)
574 files[name][hashname] = md5
578 ################################################################################
580 # see http://bugs.debian.org/619131
581 def build_package_list(dsc, session = None):
582 if not dsc.has_key("package-list"):
587 for line in dsc["package-list"].split("\n"):
591 fields = line.split()
593 package_type = fields[1]
594 (section, component) = extract_component_from_section(fields[2])
597 # Validate type if we have a session
598 if session and get_override_type(package_type, session) is None:
599 # Maybe just warn and ignore? exit(1) might be a bit hard...
600 utils.fubar("invalid type (%s) in Package-List." % (package_type))
602 if name not in packages or packages[name]["type"] == "dsc":
603 packages[name] = dict(priority=priority, section=section, type=package_type, component=component, files=[])
607 ################################################################################
609 def send_mail (message, filename=""):
610 """sendmail wrapper, takes _either_ a message string or a file as arguments"""
612 maildir = Cnf.get('Dir::Mail')
614 path = os.path.join(maildir, datetime.datetime.now().isoformat())
615 path = find_next_free(path)
620 # Check whether we're supposed to be sending mail
621 if Cnf.has_key("Dinstall::Options::No-Mail") and Cnf["Dinstall::Options::No-Mail"]:
624 # If we've been passed a string dump it into a temporary file
626 (fd, filename) = tempfile.mkstemp()
627 os.write (fd, message)
630 if Cnf.has_key("Dinstall::MailWhiteList") and \
631 Cnf["Dinstall::MailWhiteList"] != "":
632 message_in = open_file(filename)
633 message_raw = modemail.message_from_file(message_in)
637 whitelist_in = open_file(Cnf["Dinstall::MailWhiteList"])
639 for line in whitelist_in:
640 if not re_whitespace_comment.match(line):
641 if re_re_mark.match(line):
642 whitelist.append(re.compile(re_re_mark.sub("", line.strip(), 1)))
644 whitelist.append(re.compile(re.escape(line.strip())))
649 fields = ["To", "Bcc", "Cc"]
652 value = message_raw.get(field, None)
655 for item in value.split(","):
656 (rfc822_maint, rfc2047_maint, name, email) = fix_maintainer(item.strip())
662 if not mail_whitelisted:
663 print "Skipping %s since it's not in %s" % (item, Cnf["Dinstall::MailWhiteList"])
667 # Doesn't have any mail in whitelist so remove the header
669 del message_raw[field]
671 message_raw.replace_header(field, ', '.join(match))
673 # Change message fields in order if we don't have a To header
674 if not message_raw.has_key("To"):
677 if message_raw.has_key(field):
678 message_raw[fields[-1]] = message_raw[field]
679 del message_raw[field]
682 # Clean up any temporary files
683 # and return, as we removed all recipients.
685 os.unlink (filename);
688 fd = os.open(filename, os.O_RDWR|os.O_EXCL, 0o700);
689 os.write (fd, message_raw.as_string(True));
693 (result, output) = commands.getstatusoutput("%s < %s" % (Cnf["Dinstall::SendmailCommand"], filename))
695 raise SendmailFailedError(output)
697 # Clean up any temporary files
701 ################################################################################
def poolify (source, component):
    """
    Return the pool subdirectory path for C{source} inside C{component}.

    Library source packages are filed under a four-character prefix
    ("libf/" for "libfoo"), everything else under the first letter,
    matching the Debian archive pool layout.

    @type source: string
    @param source: source package name

    @type component: string
    @param component: component name (may be empty)

    @rtype: string
    @return: relative pool path ending in '/'
    """
    # Reinstate the component separator that the visible span elided: an
    # empty component contributes nothing, otherwise it becomes a leading
    # directory.
    if component:
        component += '/'
    if source[:3] == "lib":
        return component + source[:4] + '/' + source + '/'
    else:
        return component + source[:1] + '/' + source + '/'
711 ################################################################################
def move (src, dest, overwrite = 0, perms = 0o664):
    """
    Move C{src} to C{dest}, creating parent directories as needed.

    If C{dest} is an existing directory the file keeps its basename
    inside it.  Refuses to overwrite an existing destination unless
    C{overwrite} is set (and the target is writable).  The source is
    removed after a successful copy.
    """
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = dest
    else:
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        # Temporarily clear the umask so the 02775 (setgid, group-writable)
        # mode is applied exactly.  (Was the py2-only literal `00000`.)
        umask = os.umask(0o000)
        os.makedirs(dest_dir, 0o2775)
        os.umask(umask)
    #print "Moving %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
        if not overwrite:
            fubar("Can't move %s to %s - file already exists." % (src, dest))
        else:
            if not os.access(dest, os.W_OK):
                fubar("Can't move %s to %s - can't write to existing file." % (src, dest))
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
    # Copy succeeded: remove the original to complete the move.
    os.unlink(src)
def copy (src, dest, overwrite = 0, perms = 0o664):
    """
    Copy C{src} to C{dest}, creating parent directories as needed.

    If C{dest} is an existing directory the file keeps its basename
    inside it.  Raises FileExistsError unless C{overwrite} is set, and
    CantOverwriteError when the existing target is not writable.
    """
    if os.path.exists(dest) and os.path.isdir(dest):
        dest_dir = dest
    else:
        dest_dir = os.path.dirname(dest)
    if not os.path.exists(dest_dir):
        # Temporarily clear the umask so the 02775 (setgid, group-writable)
        # mode is applied exactly.  (Was the py2-only literal `00000`.)
        umask = os.umask(0o000)
        os.makedirs(dest_dir, 0o2775)
        os.umask(umask)
    #print "Copying %s to %s..." % (src, dest)
    if os.path.exists(dest) and os.path.isdir(dest):
        dest += '/' + os.path.basename(src)
    # Don't overwrite unless forced to
    if os.path.exists(dest):
        if not overwrite:
            raise FileExistsError
        else:
            if not os.access(dest, os.W_OK):
                raise CantOverwriteError
    shutil.copy2(src, dest)
    os.chmod(dest, perms)
758 ################################################################################
761 res = socket.getfqdn()
762 database_hostname = Cnf.get("Config::" + res + "::DatabaseHostname")
763 if database_hostname:
764 return database_hostname
def which_conf_file ():
    """Return the dak config file to use, honouring $DAK_CONFIG and the
    per-host "Config::<fqdn>::" overrides already loaded into Cnf."""
    # An explicit environment override always wins.
    if os.getenv('DAK_CONFIG'):
        return os.getenv('DAK_CONFIG')
    res = socket.getfqdn()
    # In case we allow local config files per user, try if one exists
    if Cnf.find_b("Config::" + res + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # NOTE(review): the second component is absolute, so os.path.join
        # discards homedir and confpath is always "/etc/dak.conf" —
        # presumably "~/etc/dak.conf" was intended; confirm before changing.
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            # Merge the local file into the in-memory configuration.
            apt_pkg.ReadConfigFileISC(Cnf,confpath)
    # We are still in here, so there is no local config file or we do
    # not allow local files. Do the normal stuff.
    if Cnf.get("Config::" + res + "::DakConfig"):
        return Cnf["Config::" + res + "::DakConfig"]
    return default_config
def which_apt_conf_file ():
    """Return the apt.conf file to use for this host, honouring the
    per-host "Config::<fqdn>::" overrides already loaded into Cnf."""
    res = socket.getfqdn()
    # In case we allow local config files per user, try if one exists
    if Cnf.find_b("Config::" + res + "::AllowLocalConfig"):
        homedir = os.getenv("HOME")
        # NOTE(review): the second component is absolute, so os.path.join
        # discards homedir and confpath is always "/etc/dak.conf" —
        # presumably "~/etc/dak.conf" was intended; confirm before changing.
        confpath = os.path.join(homedir, "/etc/dak.conf")
        if os.path.exists(confpath):
            # Bug fix: read the local config file that was just located,
            # not the global default_config (the sibling which_conf_file()
            # reads confpath here).
            apt_pkg.ReadConfigFileISC(Cnf, confpath)

    if Cnf.get("Config::" + res + "::AptConfig"):
        return Cnf["Config::" + res + "::AptConfig"]

    return default_apt_config
def which_alias_file():
    """Return the host-specific forward-alias file, if one exists."""
    hostname = socket.getfqdn()
    # Fixed per-host location under /var/lib/misc.
    aliasfn = '/var/lib/misc/'+hostname+'/forward-alias'
    if os.path.exists(aliasfn):
809 ################################################################################
def TemplateSubst(subst_map, filename):
    """Perform a substitution of template: read the template from
    C{filename} and replace each key of C{subst_map} found in it with the
    stringified corresponding value."""
    templatefile = open_file(filename)
    template = templatefile.read()
    # Plain textual replacement — keys are literal placeholders, not regexes.
    # NOTE(review): iteritems() is Python-2-only.
    for k, v in subst_map.iteritems():
        template = template.replace(k, str(v))
820 ################################################################################
822 def fubar(msg, exit_code=1):
823 sys.stderr.write("E: %s\n" % (msg))
827 sys.stderr.write("W: %s\n" % (msg))
829 ################################################################################
831 # Returns the user name with a laughable attempt at rfc822 conformancy
832 # (read: removing stray periods).
834 return pwd.getpwuid(os.getuid())[4].split(',')[0].replace('.', '')
837 return pwd.getpwuid(os.getuid())[0]
839 ################################################################################
849 return ("%d%s" % (c, t))
851 ################################################################################
853 def cc_fix_changes (changes):
854 o = changes.get("architecture", "")
856 del changes["architecture"]
857 changes["architecture"] = {}
859 changes["architecture"][j] = 1
861 def changes_compare (a, b):
862 """ Sort by source name, source version, 'have source', and then by filename """
864 a_changes = parse_changes(a)
869 b_changes = parse_changes(b)
873 cc_fix_changes (a_changes)
874 cc_fix_changes (b_changes)
876 # Sort by source name
877 a_source = a_changes.get("source")
878 b_source = b_changes.get("source")
879 q = cmp (a_source, b_source)
883 # Sort by source version
884 a_version = a_changes.get("version", "0")
885 b_version = b_changes.get("version", "0")
886 q = apt_pkg.version_compare(a_version, b_version)
890 # Sort by 'have source'
891 a_has_source = a_changes["architecture"].get("source")
892 b_has_source = b_changes["architecture"].get("source")
893 if a_has_source and not b_has_source:
895 elif b_has_source and not a_has_source:
898 # Fall back to sort by filename
901 ################################################################################
def find_next_free (dest, too_many=100):
    """
    Return C{dest} if it does not exist yet, otherwise the first free
    name of the form C{dest.N}; raise NoFreeFilenameError after
    C{too_many} attempts.
    """
    extra = 0
    orig_dest = dest
    # Probe dest, dest.0, dest.1, ... until a non-existing name is found.
    while os.path.exists(dest) and extra < too_many:
        dest = orig_dest + '.' + repr(extra)
        extra += 1
    if extra >= too_many:
        raise NoFreeFilenameError
    return dest
913 ################################################################################
def result_join (original, sep = '\t'):
    """
    Join the entries of C{original} with C{sep}, rendering None entries
    as the empty string.

    @type original: sequence of strings (entries may be None)
    @param original: values to join

    @type sep: string
    @param sep: separator (defaults to TAB)

    @rtype: string
    @return: the joined string
    """
    # Generator form replaces the py2-only xrange/index loop and the
    # `== None` comparison of the original.
    return sep.join("" if entry is None else entry for entry in original)
924 ################################################################################
def prefix_multi_line_string(str, prefix, include_blank_lines=0):
    """
    Split C{str} on newlines, strip each line, prefix it with C{prefix}
    and re-join.  Blank lines are dropped unless C{include_blank_lines}
    is set (in which case they appear as the bare prefix).

    NOTE: the first parameter shadows the builtin ``str``; kept for
    interface compatibility.
    """
    out = ""
    for line in str.split('\n'):
        line = line.strip()
        if line or include_blank_lines:
            out += "%s%s\n" % (prefix, line)
    # Strip trailing new line
    if out:
        out = out[:-1]
    return out
937 ################################################################################
939 def validate_changes_file_arg(filename, require_changes=1):
941 'filename' is either a .changes or .dak file. If 'filename' is a
942 .dak file, it's changed to be the corresponding .changes file. The
943 function then checks if the .changes file a) exists and b) is
944 readable and returns the .changes filename if so. If there's a
945 problem, the next action depends on the option 'require_changes'
948 - If 'require_changes' == -1, errors are ignored and the .changes
949 filename is returned.
950 - If 'require_changes' == 0, a warning is given and 'None' is returned.
951 - If 'require_changes' == 1, a fatal error is raised.
956 orig_filename = filename
957 if filename.endswith(".dak"):
958 filename = filename[:-4]+".changes"
960 if not filename.endswith(".changes"):
961 error = "invalid file type; not a changes file"
963 if not os.access(filename,os.R_OK):
964 if os.path.exists(filename):
965 error = "permission denied"
967 error = "file not found"
970 if require_changes == 1:
971 fubar("%s: %s." % (orig_filename, error))
972 elif require_changes == 0:
973 warn("Skipping %s - %s" % (orig_filename, error))
975 else: # We only care about the .dak file
980 ################################################################################
983 return (arch != "source" and arch != "all")
985 ################################################################################
def join_with_commas_and(list):
    """Render the items English-style: "a, b and c" ("nothing" if empty).

    The parameter name shadows the builtin ``list`` but is kept for
    interface compatibility.
    """
    if not list:
        return "nothing"
    if len(list) == 1:
        return list[0]
    leading = ", ".join(list[:-1])
    return leading + " and " + list[-1]
992 ################################################################################
997 (pkg, version, constraint) = atom
999 pp_dep = "%s (%s %s)" % (pkg, constraint, version)
1002 pp_deps.append(pp_dep)
1003 return " |".join(pp_deps)
1005 ################################################################################
1010 ################################################################################
1012 def parse_args(Options):
1013 """ Handle -a, -c and -s arguments; returns them as SQL constraints """
1014 # XXX: This should go away and everything which calls it be converted
1015 # to use SQLA properly. For now, we'll just fix it not to use
1016 # the old Pg interface though
1017 session = DBConn().session()
1019 if Options["Suite"]:
1021 for suitename in split_args(Options["Suite"]):
1022 suite = get_suite(suitename, session=session)
1023 if not suite or suite.suite_id is None:
1024 warn("suite '%s' not recognised." % (suite and suite.suite_name or suitename))
1026 suite_ids_list.append(suite.suite_id)
1028 con_suites = "AND su.id IN (%s)" % ", ".join([ str(i) for i in suite_ids_list ])
1030 fubar("No valid suite given.")
1035 if Options["Component"]:
1036 component_ids_list = []
1037 for componentname in split_args(Options["Component"]):
1038 component = get_component(componentname, session=session)
1039 if component is None:
1040 warn("component '%s' not recognised." % (componentname))
1042 component_ids_list.append(component.component_id)
1043 if component_ids_list:
1044 con_components = "AND c.id IN (%s)" % ", ".join([ str(i) for i in component_ids_list ])
1046 fubar("No valid component given.")
1050 # Process architecture
1051 con_architectures = ""
1053 if Options["Architecture"]:
1055 for archname in split_args(Options["Architecture"]):
1056 if archname == "source":
1059 arch = get_architecture(archname, session=session)
1061 warn("architecture '%s' not recognised." % (archname))
1063 arch_ids_list.append(arch.arch_id)
1065 con_architectures = "AND a.id IN (%s)" % ", ".join([ str(i) for i in arch_ids_list ])
1067 if not check_source:
1068 fubar("No valid architecture given.")
1072 return (con_suites, con_architectures, con_components, check_source)
1074 ################################################################################
1076 def arch_compare_sw (a, b):
1078 Function for use in sorting lists of architectures.
1080 Sorts normally except that 'source' dominates all others.
1083 if a == "source" and b == "source":
1092 ################################################################################
1094 def split_args (s, dwim=1):
1096 Split command line arguments which can be separated by either commas
1097 or whitespace. If dwim is set, it will complain about string ending
1098 in comma since this usually means someone did 'dak ls -a i386, m68k
1099 foo' or something and the inevitable confusion resulting from 'm68k'
1100 being treated as an argument is undesirable.
1103 if s.find(",") == -1:
1106 if s[-1:] == "," and dwim:
1107 fubar("split_args: found trailing comma, spurious space maybe?")
1110 ################################################################################
1112 def gpgv_get_status_output(cmd, status_read, status_write):
1114 Our very own version of commands.getouputstatus(), hacked to support
1118 cmd = ['/bin/sh', '-c', cmd]
1119 p2cread, p2cwrite = os.pipe()
1120 c2pread, c2pwrite = os.pipe()
1121 errout, errin = os.pipe()
1131 for i in range(3, 256):
1132 if i != status_write:
1138 os.execvp(cmd[0], cmd)
1144 os.dup2(c2pread, c2pwrite)
1145 os.dup2(errout, errin)
1147 output = status = ""
1149 i, o, e = select.select([c2pwrite, errin, status_read], [], [])
1152 r = os.read(fd, 8196)
1154 more_data.append(fd)
1155 if fd == c2pwrite or fd == errin:
1157 elif fd == status_read:
1160 fubar("Unexpected file descriptor [%s] returned from select\n" % (fd))
1162 pid, exit_status = os.waitpid(pid, 0)
1164 os.close(status_write)
1165 os.close(status_read)
1175 return output, status, exit_status
1177 ################################################################################
1179 def process_gpgv_output(status):
1180 # Process the status-fd output
1183 for line in status.split('\n'):
1187 split = line.split()
1189 internal_error += "gpgv status line is malformed (< 2 atoms) ['%s'].\n" % (line)
1191 (gnupg, keyword) = split[:2]
1192 if gnupg != "[GNUPG:]":
1193 internal_error += "gpgv status line is malformed (incorrect prefix '%s').\n" % (gnupg)
1196 if keywords.has_key(keyword) and keyword not in [ "NODATA", "SIGEXPIRED", "KEYEXPIRED" ]:
1197 internal_error += "found duplicate status token ('%s').\n" % (keyword)
1200 keywords[keyword] = args
1202 return (keywords, internal_error)
1204 ################################################################################
1206 def retrieve_key (filename, keyserver=None, keyring=None):
1208 Retrieve the key that signed 'filename' from 'keyserver' and
1209 add it to 'keyring'. Returns nothing on success, or an error message
1213 # Defaults for keyserver and keyring
1215 keyserver = Cnf["Dinstall::KeyServer"]
1217 keyring = get_primary_keyring_path()
1219 # Ensure the filename contains no shell meta-characters or other badness
1220 if not re_taint_free.match(filename):
1221 return "%s: tainted filename" % (filename)
1223 # Invoke gpgv on the file
1224 status_read, status_write = os.pipe()
1225 cmd = "gpgv --status-fd %s --keyring /dev/null %s" % (status_write, filename)
1226 (_, status, _) = gpgv_get_status_output(cmd, status_read, status_write)
1228 # Process the status-fd output
1229 (keywords, internal_error) = process_gpgv_output(status)
1231 return internal_error
1233 if not keywords.has_key("NO_PUBKEY"):
1234 return "didn't find expected NO_PUBKEY in gpgv status-fd output"
1236 fingerprint = keywords["NO_PUBKEY"][0]
1237 # XXX - gpg sucks. You can't use --secret-keyring=/dev/null as
1238 # it'll try to create a lockfile in /dev. A better solution might
1239 # be a tempfile or something.
1240 cmd = "gpg --no-default-keyring --secret-keyring=%s --no-options" \
1241 % (Cnf["Dinstall::SigningKeyring"])
1242 cmd += " --keyring %s --keyserver %s --recv-key %s" \
1243 % (keyring, keyserver, fingerprint)
1244 (result, output) = commands.getstatusoutput(cmd)
1246 return "'%s' failed with exit code %s" % (cmd, result)
1250 ################################################################################
def gpg_keyring_args(keyrings=None):
    """
    Build the "--keyring" option string for gpg/gpgv invocations.

    @type keyrings: list of strings (or None)
    @param keyrings: keyring paths to use; when None/empty, fall back to
        the active keyring paths from the database

    @rtype: string
    @return: space-joined "--keyring <path>" options
    """
    # Only fall back to the defaults when the caller supplied nothing —
    # the visible span unconditionally clobbered caller-provided keyrings.
    if not keyrings:
        keyrings = get_active_keyring_paths()

    return " ".join(["--keyring %s" % x for x in keyrings])
1258 ################################################################################
def check_signature (sig_filename, data_filename="", keyrings=None, autofetch=None, session=None):
    """
    Check the signature of a file and return the fingerprint if the
    signature is valid or 'None' if it's not.  The first argument is the
    filename whose signature should be checked.  The second argument is a
    reject function and is called when an error is found.  The reject()
    function must allow for two arguments: the first is the error message,
    the second is an optional prefix string.  It's possible for reject()
    to be called more than once during an invocation of check_signature().
    The third argument is optional and is the name of the files the
    detached signature applies to.  The fourth argument is optional and is
    a *list* of keyrings to use.  'autofetch' can either be None, True or
    False.  If None, the default behaviour specified in the config will be
    used.

    Returns a (fingerprint, rejects) tuple: 'rejects' is a list of error
    messages, and 'fingerprint' is None whenever 'rejects' is non-empty.
    """

    rejects = []

    # Ensure the filename contains no shell meta-characters or other badness
    if not re_taint_free.match(sig_filename):
        rejects.append("!!WARNING!! tainted signature filename: '%s'." % (sig_filename))
        return (None, rejects)

    if data_filename and not re_taint_free.match(data_filename):
        rejects.append("!!WARNING!! tainted data filename: '%s'." % (data_filename))
        return (None, rejects)

    # Default to the active keyrings known to the database
    if not keyrings:
        keyrings = [ x.keyring_name for x in session.query(Keyring).filter(Keyring.active == True).all() ]

    # Autofetch the signing key if that's enabled
    if autofetch is None:
        autofetch = Cnf.get("Dinstall::KeyAutoFetch")
    if autofetch:
        error_msg = retrieve_key(sig_filename)
        if error_msg:
            rejects.append(error_msg)
            return (None, rejects)

    # Build the command line
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s %s" % (
        status_write, gpg_keyring_args(keyrings), sig_filename, data_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    if internal_error:
        rejects.append("internal error while performing signature check on %s." % (sig_filename))
        # BUGFIX: these two appends used to pass a second argument to
        # list.append() (a leftover from the old reject(msg, prefix)
        # callback), which raises TypeError at runtime.
        rejects.append(internal_error)
        rejects.append("Please report the above errors to the Archive maintainers by replying to this mail.")
        return (None, rejects)

    # Now check for obviously bad things in the processed output
    if "KEYREVOKED" in keywords:
        rejects.append("The key used to sign %s has been revoked." % (sig_filename))
    if "BADSIG" in keywords:
        rejects.append("bad signature on %s." % (sig_filename))
    if "ERRSIG" in keywords and "NO_PUBKEY" not in keywords:
        rejects.append("failed to check signature on %s." % (sig_filename))
    if "NO_PUBKEY" in keywords:
        args = keywords["NO_PUBKEY"]
        key = None
        if len(args) >= 1:
            key = args[0]
        rejects.append("The key (0x%s) used to sign %s wasn't found in the keyring(s)." % (key, sig_filename))
    if "BADARMOR" in keywords:
        rejects.append("ASCII armour of signature was corrupt in %s." % (sig_filename))
    if "NODATA" in keywords:
        rejects.append("no signature found in %s." % (sig_filename))
    if "EXPKEYSIG" in keywords:
        args = keywords["EXPKEYSIG"]
        key = None
        if len(args) >= 1:
            key = args[0]
        rejects.append("Signature made by expired key 0x%s" % (key))
    if "KEYEXPIRED" in keywords and "GOODSIG" not in keywords:
        args = keywords["KEYEXPIRED"]
        expiredate = ""
        if len(args) >= 1:
            timestamp = args[0]
            # gpgv emits either an epoch timestamp or an ISO-8601 date
            # (containing a 'T') depending on its version.
            if timestamp.count("T") == 0:
                try:
                    expiredate = time.strftime("%Y-%m-%d", time.gmtime(float(timestamp)))
                except ValueError:
                    expiredate = "unknown (%s)" % (timestamp)
            else:
                expiredate = timestamp
        rejects.append("The key used to sign %s has expired on %s" % (sig_filename, expiredate))

    if len(rejects) > 0:
        return (None, rejects)

    # Next check gpgv exited with a zero return code
    if exit_status:
        rejects.append("gpgv failed while checking %s." % (sig_filename))
        if status.strip():
            rejects.append(prefix_multi_line_string(status, " [GPG status-fd output:] "))
        else:
            rejects.append(prefix_multi_line_string(output, " [GPG output:] "))
        return (None, rejects)

    # Sanity check the good stuff we expect
    if "VALIDSIG" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No VALIDSIG]." % (sig_filename))
    else:
        args = keywords["VALIDSIG"]
        if len(args) < 1:
            rejects.append("internal error while checking signature on %s." % (sig_filename))
        else:
            fingerprint = args[0]
    if "GOODSIG" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No GOODSIG]." % (sig_filename))
    if "SIG_ID" not in keywords:
        rejects.append("signature on %s does not appear to be valid [No SIG_ID]." % (sig_filename))

    # Finally ensure there's not something we don't recognise
    known_keywords = dict(VALIDSIG="",SIG_ID="",GOODSIG="",BADSIG="",ERRSIG="",
                          SIGEXPIRED="",KEYREVOKED="",NO_PUBKEY="",BADARMOR="",
                          NODATA="",NOTATION_DATA="",NOTATION_NAME="",KEYEXPIRED="",POLICY_URL="")

    for keyword in keywords.keys():
        if keyword not in known_keywords:
            rejects.append("found unknown status token '%s' from gpgv with args '%r' in %s." % (keyword, keywords[keyword], sig_filename))

    if len(rejects) > 0:
        return (None, rejects)

    return (fingerprint, [])
1392 ################################################################################
def gpg_get_key_addresses(fingerprint):
    """Retrieve email addresses from gpg key uids for a given fingerprint.

    Results are memoized in key_uid_email_cache so gpg is invoked at
    most once per fingerprint.
    """
    addresses = key_uid_email_cache.get(fingerprint)
    if addresses is not None:
        # Cache hit; avoid re-running gpg.
        return addresses
    addresses = []
    cmd = "gpg --no-default-keyring %s --fingerprint %s" \
           % (gpg_keyring_args(), fingerprint)
    (result, output) = commands.getstatusoutput(cmd)
    if result == 0:
        for l in output.split('\n'):
            # re_gpg_uid extracts the mail address from a 'uid' line
            m = re_gpg_uid.match(l)
            if m:
                addresses.append(m.group(1))
    # Cache even an empty result: a failing/unknown fingerprint would
    # otherwise be retried on every call.
    key_uid_email_cache[fingerprint] = addresses
    return addresses
1411 ################################################################################
def clean_symlink (src, dest, root):
    """
    Relativize an absolute symlink from 'src' -> 'dest' relative to 'root'.
    Returns fixed 'src'.
    """
    # Strip the archive root off both paths, then climb out of dest's
    # directory with as many '../' components as it has path segments.
    rel_src = src.replace(root, '', 1)
    rel_dest_dir = os.path.dirname(dest.replace(root, '', 1))
    depth = rel_dest_dir.count('/') + 1
    return '../' * depth + rel_src
1424 ################################################################################
def temp_filename(directory=None, prefix="dak", suffix=""):
    """
    Return a secure and unique filename by pre-creating it.

    If 'directory' is non-null, it will be the directory the file is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pair (fd, name).
    """
    # mkstemp creates the file with O_EXCL, so the name cannot be raced.
    return tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=directory)
1438 ################################################################################
def temp_dirname(parent=None, prefix="dak", suffix=""):
    """
    Return a secure and unique directory by pre-creating it.

    If 'parent' is non-null, it will be the directory the directory is pre-created in.
    If 'prefix' is non-null, the filename will be prefixed with it, default is dak.
    If 'suffix' is non-null, the filename will end with it.

    Returns a pathname to the new directory.
    """
    # mkdtemp creates the directory mode 0700, readable only by us.
    return tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=parent)
1452 ################################################################################
def is_email_alias(email):
    """ checks if the user part of the email is listed in the alias file """
    global alias_cache
    if alias_cache is None:
        aliasfn = which_alias_file()
        alias_cache = set()
        if aliasfn:
            # BUGFIX: close the alias file when done; it was previously
            # left open for the interpreter to collect.
            with open(aliasfn) as aliasfile:
                for l in aliasfile:
                    # aliases(5) format: 'name: target' -- only the name
                    # part matters here.
                    alias_cache.add(l.split(':')[0])
    uid = email.split('@')[0]
    return uid in alias_cache
1466 ################################################################################
def get_changes_files(from_dir):
    """
    Takes a directory and lists all .changes files in it (as well as chdir'ing
    to the directory; this is due to broken behaviour on the part of p-u/p-a
    when you're not in the right place)

    Returns a list of filenames
    """
    try:
        # Much of the rest of p-u/p-a depends on being in the right place
        os.chdir(from_dir)
        changes_files = [entry for entry in os.listdir(from_dir)
                         if entry.endswith('.changes')]
    except OSError as e:
        fubar("Failed to read list from directory %s (%s)" % (from_dir, e))

    return changes_files
1485 ################################################################################
# Module-level apt configuration object holding all dak settings.  This
# runs at import time so every consumer of this module sees a populated
# Cnf.  DAK_TEST in the environment skips reading the system-wide
# default config (test suites supply their own).
Cnf = apt_pkg.Configuration()
if not os.getenv("DAK_TEST"):
    apt_pkg.read_config_file_isc(Cnf,default_config)

# A per-host config file (if different from the default) overrides or
# extends the defaults.
if which_conf_file() != default_config:
    apt_pkg.read_config_file_isc(Cnf,which_conf_file())
1496 ################################################################################
def parse_wnpp_bug_file(file = "/srv/ftp-master.debian.org/scripts/masterfiles/wnpp_rm"):
    """
    Parses the wnpp bug list available at http://qa.debian.org/data/bts/wnpp_rm
    Well, actually it parsed a local copy, but let's document the source
    somewhere ;)

    returns a dict associating source package name with a list of open wnpp
    bug numbers (Yes, there might be more than one)
    """
    try:
        # BUGFIX: use a context manager so the file is always closed
        # (it was previously left open).
        with open(file) as f:
            lines = f.readlines()
    except IOError:
        # Best effort: a missing wnpp dump just means no bugs get closed.
        print("Warning: Couldn't open %s; don't know about WNPP bugs, so won't close any." % file)
        lines = []

    wnpp = {}
    for line in lines:
        # Each line looks like 'srcpkg: ENTRY|ENTRY|...'
        splited_line = line.split(": ", 1)
        if len(splited_line) > 1:
            wnpp[splited_line[0]] = splited_line[1].split("|")

    # Reduce each entry to just its bug number.
    for source in wnpp.keys():
        bugs = []
        for wnpp_bug in wnpp[source]:
            # BUGFIX: guard against entries without any digits; calling
            # .group() on a failed search raised AttributeError.
            m = re.search(r"\d+", wnpp_bug)
            if m:
                bugs.append(m.group())
        wnpp[source] = bugs
    return wnpp
1531 ################################################################################
def get_packages_from_ftp(root, suite, component, architecture):
    """
    Returns an object containing apt_pkg-parseable data collected by
    aggregating Packages.gz files gathered for each architecture.

    @type root: string
    @param root: path to ftp archive root directory

    @type suite: string
    @param suite: suite to extract files from

    @type component: string
    @param component: component to extract files from

    @type architecture: string
    @param architecture: architecture to extract files from

    @rtype: TagFile
    @return: apt_pkg class containing package data
    """
    filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (root, suite, component, architecture)
    # Decompress into a freshly pre-created temporary file.
    (fd, temp_file) = temp_filename()
    (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_file))
    if (result != 0):
        fubar("Gunzip invocation failed!\n%s\n" % (output), result)
    # Append the debian-installer (udeb) Packages file too, if one exists
    # for this suite/component/architecture.
    filename = "%s/dists/%s/%s/debian-installer/binary-%s/Packages.gz" % (root, suite, component, architecture)
    if os.path.exists(filename):
        (result, output) = commands.getstatusoutput("gunzip -c %s >> %s" % (filename, temp_file))
        if (result != 0):
            fubar("Gunzip invocation failed!\n%s\n" % (output), result)
    packages = open_file(temp_file)
    Packages = apt_pkg.ParseTagFile(packages)
    # The parsed TagFile keeps its own handle; the temp file on disk is
    # no longer needed.
    os.unlink(temp_file)
    return Packages
1569 ################################################################################
def deb_extract_control(fh):
    """Extract DEBIAN/control from a binary package given as a file object."""
    deb = apt_inst.DebFile(fh)
    # 'control' is the control.tar member of the .deb; pull the control
    # file itself out of it.
    return deb.control.extractdata("control")
1575 ################################################################################
def mail_addresses_for_upload(maintainer, changed_by, fingerprint):
    """Mail addresses to contact for an upload

    Args:
       maintainer (str): Maintainer field of the changes file
       changed_by (str): Changed-By field of the changes file
       fingerprint (str): Fingerprint of the PGP key used to sign the upload

    Returns:
       List of RFC 2047-encoded mail addresses to contact regarding this upload
    """
    recipients = [maintainer]
    if changed_by != maintainer:
        recipients.append(changed_by)

    # If neither Maintainer nor Changed-By matches an address on the
    # signing key, also notify the key's first uid address.
    key_addresses = gpg_get_key_addresses(fingerprint)
    if len(key_addresses) > 0 \
            and fix_maintainer(changed_by)[3] not in key_addresses \
            and fix_maintainer(maintainer)[3] not in key_addresses:
        recipients.append(key_addresses[0])

    return [fix_maintainer(recipient)[1] for recipient in recipients]
1599 ################################################################################
def call_editor(text="", suffix=".txt"):
    """Run editor and return the result as a string

    Kwargs:
       text (str): initial text
       suffix (str): extension for temporary file

    Returns:
       string with the edited text
    """
    # Honour VISUAL over EDITOR, falling back to vi, like mailers do.
    editor = os.environ.get('VISUAL', os.environ.get('EDITOR', 'vi'))
    tmp = tempfile.NamedTemporaryFile(mode='w+', suffix=suffix, delete=False)
    try:
        tmp.write(text)
        tmp.close()
        subprocess.check_call([editor, tmp.name])
        # BUGFIX: close the re-opened file instead of leaking the handle.
        with open(tmp.name, 'r') as fh:
            return fh.read()
    finally:
        # Always remove the temporary file, even if the editor fails.
        os.unlink(tmp.name)